Mirror of https://github.com/home-assistant/core.git (synced 2026-02-26 07:40:21 +00:00)
Compare commits
11 Commits
knx-name-r...tibber_ref
Commits:
  a9b984d705
  886c0578e7
  02e579c5ae
  d47f3ca1d8
  02e5f2c234
  e42195bfed
  b2944a6d66
  03d15fb70c
  01d57ddcf1
  cfc85cfd29
  ca2dc20709
.github/dependabot.yml (2 changes)

@@ -9,5 +9,3 @@ updates:
     labels:
       - dependency
       - github_actions
-    cooldown:
-      default-days: 7
.github/workflows/builder.yml (117 changes)

@@ -18,19 +18,11 @@ env:
   BASE_IMAGE_VERSION: "2026.01.0"
   ARCHITECTURES: '["amd64", "aarch64"]'
-
-permissions: {}
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true

 jobs:
   init:
     name: Initialize build
     if: github.repository_owner == 'home-assistant'
     runs-on: ubuntu-latest
-    permissions:
-      contents: read # To check out the repository
     outputs:
       version: ${{ steps.version.outputs.version }}
       channel: ${{ steps.version.outputs.channel }}

@@ -49,16 +41,16 @@ jobs:
       - name: Get information
         id: info
-        uses: home-assistant/actions/helpers/info@master # zizmor: ignore[unpinned-uses]
+        uses: home-assistant/actions/helpers/info@master

       - name: Get version
         id: version
-        uses: home-assistant/actions/helpers/version@master # zizmor: ignore[unpinned-uses]
+        uses: home-assistant/actions/helpers/version@master
         with:
           type: ${{ env.BUILD_TYPE }}

       - name: Verify version
-        uses: home-assistant/actions/helpers/verify-version@master # zizmor: ignore[unpinned-uses]
+        uses: home-assistant/actions/helpers/verify-version@master
         with:
           ignore-dev: true

@@ -92,9 +84,9 @@ jobs:
     needs: init
     runs-on: ${{ matrix.os }}
     permissions:
-      contents: read # To check out the repository
-      packages: write # To push to GHCR
-      id-token: write # For cosign signing
+      contents: read
+      packages: write
+      id-token: write
     strategy:
       fail-fast: false
       matrix:

@@ -143,12 +135,11 @@ jobs:
         shell: bash
         env:
           UV_PRERELEASE: allow
-          VERSION: ${{ needs.init.outputs.version }}
         run: |
           python3 -m pip install "$(grep '^uv' < requirements.txt)"
           uv pip install packaging tomli
           uv pip install .
-          python3 script/version_bump.py nightly --set-nightly-version "${VERSION}"
+          python3 script/version_bump.py nightly --set-nightly-version "${{ needs.init.outputs.version }}"

           if [[ "$(ls home_assistant_frontend*.whl)" =~ ^home_assistant_frontend-(.*)-py3-none-any.whl$ ]]; then
             echo "Found frontend wheel, setting version to: ${BASH_REMATCH[1]}"

@@ -194,7 +185,7 @@ jobs:
       - name: Write meta info file
         shell: bash
         run: |
-          echo "${GITHUB_SHA};${GITHUB_REF};${GITHUB_EVENT_NAME};${GITHUB_ACTOR}" > rootfs/OFFICIAL_IMAGE
+          echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

       - name: Login to GitHub Container Registry
         uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0

@@ -214,32 +205,26 @@ jobs:
       - name: Build variables
         id: vars
         shell: bash
-        env:
-          ARCH: ${{ matrix.arch }}
         run: |
-          echo "base_image=ghcr.io/home-assistant/${ARCH}-homeassistant-base:${BASE_IMAGE_VERSION}" >> "$GITHUB_OUTPUT"
-          echo "cache_image=ghcr.io/home-assistant/${ARCH}-homeassistant:latest" >> "$GITHUB_OUTPUT"
+          echo "base_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant-base:${{ env.BASE_IMAGE_VERSION }}" >> "$GITHUB_OUTPUT"
+          echo "cache_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:latest" >> "$GITHUB_OUTPUT"
           echo "created=$(date --rfc-3339=seconds --utc)" >> "$GITHUB_OUTPUT"

       - name: Verify base image signature
-        env:
-          BASE_IMAGE: ${{ steps.vars.outputs.base_image }}
         run: |
           cosign verify \
             --certificate-oidc-issuer https://token.actions.githubusercontent.com \
             --certificate-identity-regexp "https://github.com/home-assistant/docker/.*" \
-            "${BASE_IMAGE}"
+            "${{ steps.vars.outputs.base_image }}"

       - name: Verify cache image signature
         id: cache
         continue-on-error: true
-        env:
-          CACHE_IMAGE: ${{ steps.vars.outputs.cache_image }}
         run: |
           cosign verify \
             --certificate-oidc-issuer https://token.actions.githubusercontent.com \
             --certificate-identity-regexp "https://github.com/home-assistant/core/.*" \
-            "${CACHE_IMAGE}"
+            "${{ steps.vars.outputs.cache_image }}"

       - name: Build base image
         id: build

@@ -261,12 +246,8 @@ jobs:
             org.opencontainers.image.version=${{ needs.init.outputs.version }}

       - name: Sign image
-        env:
-          ARCH: ${{ matrix.arch }}
-          VERSION: ${{ needs.init.outputs.version }}
-          DIGEST: ${{ steps.build.outputs.digest }}
         run: |
-          cosign sign --yes "ghcr.io/home-assistant/${ARCH}-homeassistant:${VERSION}@${DIGEST}"
+          cosign sign --yes "ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}@${{ steps.build.outputs.digest }}"

   build_machine:
     name: Build ${{ matrix.machine }} machine core image

@@ -274,9 +255,9 @@ jobs:
     needs: ["init", "build_base"]
     runs-on: ubuntu-latest
     permissions:
-      contents: read # To check out the repository
-      packages: write # To push to GHCR
-      id-token: write # For cosign signing
+      contents: read
+      packages: write
+      id-token: write
     strategy:
       matrix:
         machine:

@@ -301,13 +282,11 @@ jobs:
           persist-credentials: false

       - name: Set build additional args
-        env:
-          VERSION: ${{ needs.init.outputs.version }}
         run: |
           # Create general tags
-          if [[ "${VERSION}" =~ d ]]; then
+          if [[ "${{ needs.init.outputs.version }}" =~ d ]]; then
             echo "BUILD_ARGS=--additional-tag dev" >> $GITHUB_ENV
-          elif [[ "${VERSION}" =~ b ]]; then
+          elif [[ "${{ needs.init.outputs.version }}" =~ b ]]; then
             echo "BUILD_ARGS=--additional-tag beta" >> $GITHUB_ENV
           else
             echo "BUILD_ARGS=--additional-tag stable" >> $GITHUB_ENV

@@ -320,8 +299,9 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

+      # home-assistant/builder doesn't support sha pinning
       - name: Build base image
-        uses: home-assistant/builder@21bc64d76dad7a5184c67826aab41c6b6f89023a # 2025.11.0
+        uses: home-assistant/builder@2025.11.0
         with:
           args: |
             $BUILD_ARGS \

@@ -335,8 +315,6 @@ jobs:
     if: github.repository_owner == 'home-assistant'
     needs: ["init", "build_machine"]
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     steps:
       - name: Checkout the repository
         uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

@@ -344,14 +322,14 @@ jobs:
           persist-credentials: false

       - name: Initialize git
-        uses: home-assistant/actions/helpers/git-init@master # zizmor: ignore[unpinned-uses]
+        uses: home-assistant/actions/helpers/git-init@master
         with:
           name: ${{ secrets.GIT_NAME }}
           email: ${{ secrets.GIT_EMAIL }}
           token: ${{ secrets.GIT_TOKEN }}

       - name: Update version file
-        uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
+        uses: home-assistant/actions/helpers/version-push@master
         with:
           key: "homeassistant[]"
           key-description: "Home Assistant Core"

@@ -361,7 +339,7 @@ jobs:
       - name: Update version file (stable -> beta)
         if: needs.init.outputs.channel == 'stable'
-        uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
+        uses: home-assistant/actions/helpers/version-push@master
         with:
           key: "homeassistant[]"
           key-description: "Home Assistant Core"

@@ -376,9 +354,9 @@ jobs:
     needs: ["init", "build_base"]
     runs-on: ubuntu-latest
     permissions:
-      contents: read # To check out the repository
-      packages: write # To push to GHCR
-      id-token: write # For cosign signing
+      contents: read
+      packages: write
+      id-token: write
     strategy:
       fail-fast: false
       matrix:

@@ -405,17 +383,14 @@ jobs:
       - name: Verify architecture image signatures
         shell: bash
-        env:
-          ARCHITECTURES: ${{ needs.init.outputs.architectures }}
-          VERSION: ${{ needs.init.outputs.version }}
         run: |
-          ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
+          ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
           for arch in $ARCHS; do
             echo "Verifying ${arch} image signature..."
             cosign verify \
               --certificate-oidc-issuer https://token.actions.githubusercontent.com \
               --certificate-identity-regexp https://github.com/home-assistant/core/.* \
-              "ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"
+              "ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
           done
           echo "✓ All images verified successfully"

@@ -446,19 +421,16 @@ jobs:
       - name: Copy architecture images to DockerHub
         if: matrix.registry == 'docker.io/homeassistant'
         shell: bash
-        env:
-          ARCHITECTURES: ${{ needs.init.outputs.architectures }}
-          VERSION: ${{ needs.init.outputs.version }}
         run: |
           # Use imagetools to copy image blobs directly between registries
           # This preserves provenance/attestations and seems to be much faster than pull/push
-          ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
+          ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
           for arch in $ARCHS; do
             echo "Copying ${arch} image to DockerHub..."
             for attempt in 1 2 3; do
               if docker buildx imagetools create \
-                --tag "docker.io/homeassistant/${arch}-homeassistant:${VERSION}" \
-                "ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"; then
+                --tag "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}" \
+                "ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"; then
                 break
               fi
               echo "Attempt ${attempt} failed, retrying in 10 seconds..."

@@ -468,28 +440,23 @@ jobs:
                 exit 1
               fi
             done
-            cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${VERSION}"
+            cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
           done

       - name: Create and push multi-arch manifests
         shell: bash
-        env:
-          ARCHITECTURES: ${{ needs.init.outputs.architectures }}
-          REGISTRY: ${{ matrix.registry }}
-          VERSION: ${{ needs.init.outputs.version }}
-          META_TAGS: ${{ steps.meta.outputs.tags }}
         run: |
           # Build list of architecture images dynamically
-          ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
+          ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
           ARCH_IMAGES=()
           for arch in $ARCHS; do
-            ARCH_IMAGES+=("${REGISTRY}/${arch}-homeassistant:${VERSION}")
+            ARCH_IMAGES+=("${{ matrix.registry }}/${arch}-homeassistant:${{ needs.init.outputs.version }}")
           done

           # Build list of all tags for single manifest creation
           # Note: Using sep-tags=',' in metadata-action for easier parsing
           TAG_ARGS=()
-          IFS=',' read -ra TAGS <<< "${META_TAGS}"
+          IFS=',' read -ra TAGS <<< "${{ steps.meta.outputs.tags }}"
           for tag in "${TAGS[@]}"; do
             TAG_ARGS+=("--tag" "${tag}")
           done

@@ -513,8 +480,8 @@ jobs:
     needs: ["init", "build_base"]
     runs-on: ubuntu-latest
     permissions:
-      contents: read # To check out the repository
-      id-token: write # For PyPI trusted publishing
+      contents: read
+      id-token: write
     if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
     steps:
       - name: Checkout the repository

@@ -554,10 +521,10 @@ jobs:
     name: Build and test hassfest image
     runs-on: ubuntu-latest
     permissions:
-      contents: read # To check out the repository
-      packages: write # To push to GHCR
-      attestations: write # For build provenance attestation
-      id-token: write # For build provenance attestation
+      contents: read
+      packages: write
+      attestations: write
+      id-token: write
     needs: ["init"]
     if: github.repository_owner == 'home-assistant'
     env:

@@ -585,7 +552,7 @@ jobs:
           tags: ${{ env.HASSFEST_IMAGE_TAG }}

       - name: Run hassfest against core
-        run: docker run --rm -v "${GITHUB_WORKSPACE}":/github/workspace "${HASSFEST_IMAGE_TAG}" --core-path=/github/workspace
+        run: docker run --rm -v ${{ github.workspace }}:/github/workspace ${{ env.HASSFEST_IMAGE_TAG }} --core-path=/github/workspace

       - name: Push Docker image
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
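Aside: the "Set build additional args" hunk above keys the extra image tag off the version string alone. Any `d` in the version selects the `dev` tag, a `b` selects `beta`, and everything else gets `stable`. A minimal Python sketch of that selection logic, shown only to make the bash regex checks concrete (this helper is hypothetical, not code from the repo):

    def additional_tag(version: str) -> str:
        """Mirror the [[ "$VERSION" =~ d ]] / =~ b checks from builder.yml."""
        if "d" in version:  # e.g. a nightly like 2026.2.0.dev20260101
            return "dev"
        if "b" in version:  # e.g. a beta like 2026.2.0b3
            return "beta"
        return "stable"    # e.g. a release like 2026.1.2

    assert additional_tag("2026.2.0.dev20260101") == "dev"
    assert additional_tag("2026.2.0b3") == "beta"
    assert additional_tag("2026.1.2") == "stable"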
.github/workflows/ci.yaml (284 changes)

@@ -67,8 +67,6 @@ env:
   PYTHONASYNCIODEBUG: 1
   HASS_CI: 1

-permissions: {}
-
 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true

@@ -77,9 +75,6 @@ jobs:
   info:
     name: Collect information & changes data
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read # To check out the repository
-      pull-requests: read # For paths-filter to detect changed files
     outputs:
       # In case of issues with the partial run, use the following line instead:
       # test_full_suite: 'true'
@@ -106,20 +101,19 @@ jobs:
           persist-credentials: false
       - name: Generate partial Python venv restore key
         id: generate_python_cache_key
-        env:
-          HASH_REQUIREMENTS_TEST: ${{ hashFiles('requirements_test.txt', 'requirements_test_pre_commit.txt') }}
-          HASH_REQUIREMENTS: ${{ hashFiles('requirements.txt') }}
-          HASH_REQUIREMENTS_ALL: ${{ hashFiles('requirements_all.txt') }}
-          HASH_PACKAGE_CONSTRAINTS: ${{ hashFiles('homeassistant/package_constraints.txt') }}
-          HASH_GEN_REQUIREMENTS: ${{ hashFiles('script/gen_requirements_all.py') }}
         run: |
           # Include HA_SHORT_VERSION to force the immediate creation
           # of a new uv cache entry after a version bump.
-          echo "key=venv-${CACHE_VERSION}-${HA_SHORT_VERSION}-${HASH_REQUIREMENTS_TEST}-${HASH_REQUIREMENTS}-${HASH_REQUIREMENTS_ALL}-${HASH_PACKAGE_CONSTRAINTS}-${HASH_GEN_REQUIREMENTS}" >> $GITHUB_OUTPUT
+          echo "key=venv-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}-${{
+            hashFiles('requirements_test.txt', 'requirements_test_pre_commit.txt') }}-${{
+            hashFiles('requirements.txt') }}-${{
+            hashFiles('requirements_all.txt') }}-${{
+            hashFiles('homeassistant/package_constraints.txt') }}-${{
+            hashFiles('script/gen_requirements_all.py') }}" >> $GITHUB_OUTPUT
       - name: Generate partial apt restore key
         id: generate_apt_cache_key
         run: |
-          echo "key=$(lsb_release -rs)-apt-${CACHE_VERSION}-${HA_SHORT_VERSION}" >> $GITHUB_OUTPUT
+          echo "key=$(lsb_release -rs)-apt-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}" >> $GITHUB_OUTPUT
       - name: Filter for core changes
         uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
         id: core
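Both sides of this hunk derive the venv cache key from `CACHE_VERSION`, `HA_SHORT_VERSION`, and one hash per requirements file; they differ only in whether the `hashFiles()` results are passed through `env:` or interpolated inline. A rough Python equivalent of the key derivation, to make the scheme concrete (a hypothetical helper; GitHub's `hashFiles()` uses its own SHA-256 based digest, this sketch only approximates it):

    import hashlib
    from pathlib import Path

    def partial_venv_key(cache_version: str, ha_short_version: str, files: list[str]) -> str:
        """Join version markers with one digest per requirements file, like the workflow key."""
        digests = [
            hashlib.sha256(Path(name).read_bytes()).hexdigest()[:8]  # truncated for readability
            for name in files
        ]
        return "-".join(["venv", cache_version, ha_short_version, *digests])

Any edit to a requirements file changes a digest and therefore the key, and including `HA_SHORT_VERSION` forces a fresh cache entry immediately after a version bump, which is what the inline comment in the hunk says.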
@@ -142,18 +136,6 @@ jobs:
           filters: .integration_paths.yaml
       - name: Collect additional information
         id: info
-        env:
-          INTEGRATION_CHANGES: ${{ steps.integrations.outputs.changes }}
-          CORE_ANY: ${{ steps.core.outputs.any }}
-          INPUT_FULL: ${{ github.event.inputs.full }}
-          HAS_CI_FULL_RUN_LABEL: ${{ contains(github.event.pull_request.labels.*.name, 'ci-full-run') }}
-          INPUT_LINT_ONLY: ${{ github.event.inputs.lint-only }}
-          INPUT_PYLINT_ONLY: ${{ github.event.inputs.pylint-only }}
-          INPUT_MYPY_ONLY: ${{ github.event.inputs.mypy-only }}
-          INPUT_AUDIT_LICENSES_ONLY: ${{ github.event.inputs.audit-licenses-only }}
-          REPO_FULL_NAME: ${{ github.event.repository.full_name }}
-          INPUT_SKIP_COVERAGE: ${{ github.event.inputs.skip-coverage }}
-          HAS_CI_SKIP_COVERAGE_LABEL: ${{ contains(github.event.pull_request.labels.*.name, 'ci-skip-coverage') }}
         run: |
           # Defaults
           integrations_glob=""

@@ -167,13 +149,14 @@ jobs:
           lint_only=""
           skip_coverage=""

-          if [[ "${INTEGRATION_CHANGES}" != "[]" ]];
+          if [[ "${{ steps.integrations.outputs.changes }}" != "[]" ]];
           then
-            # Create a space-separated list of integrations
-            integrations_glob=$(echo "${INTEGRATION_CHANGES}" | jq -r '. | join(" ")')
+            # Create a file glob for the integrations
+            integrations_glob=$(echo '${{ steps.integrations.outputs.changes }}' | jq -cSr '. | join(",")')
+            [[ "${integrations_glob}" == *","* ]] && integrations_glob="{${integrations_glob}}"

             # Create list of testable integrations
-            possible_integrations=$(echo "${INTEGRATION_CHANGES}" | jq -cSr '.[]')
+            possible_integrations=$(echo '${{ steps.integrations.outputs.changes }}' | jq -cSr '.[]')
             tests=$(
               for integration in ${possible_integrations};
               do
@@ -189,8 +172,9 @@ jobs:
           # Test group count should be 1, we don't split partial tests
           test_group_count=1

-          # Create a space-separated list of test integrations
-          tests_glob=$(echo "${tests}" | jq -r '. | join(" ")')
+          # Create a file glob for the integrations tests
+          tests_glob=$(echo "${tests}" | jq -cSr '. | join(",")')
+          [[ "${tests_glob}" == *","* ]] && tests_glob="{${tests_glob}}"

           mariadb_groups="[]"
           postgresql_groups="[]"
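The added side of these two hunks turns the changed-integration list into a brace glob (`{a,b,c}`) instead of a space-separated list, so a single path pattern like `homeassistant/components/{a,b,c}` can be handed to pylint and mypy later in the workflow. A Python sketch of the same construction, for illustration only (this function is not repo code):

    import json

    def brace_glob(changes_json: str) -> str:
        """Equivalent of jq -cSr '. | join(",")' plus the {...} wrap used for multiple entries."""
        names = json.loads(changes_json)
        joined = ",".join(names)
        # The bash version only wraps when a comma is present, i.e. more than one entry.
        return f"{{{joined}}}" if len(names) > 1 else joined

    assert brace_glob('["knx"]') == "knx"
    assert brace_glob('["knx", "tibber"]') == "{knx,tibber}"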
@@ -199,12 +183,12 @@ jobs:
           # We need to run the full suite on certain branches.
           # Or, in case core files are touched, for the full suite as well.
-          if [[ "${GITHUB_REF}" == "refs/heads/dev" ]] \
-            || [[ "${GITHUB_REF}" == "refs/heads/master" ]] \
-            || [[ "${GITHUB_REF}" == "refs/heads/rc" ]] \
-            || [[ "${CORE_ANY}" == "true" ]] \
-            || [[ "${INPUT_FULL}" == "true" ]] \
-            || [[ "${HAS_CI_FULL_RUN_LABEL}" == "true" ]];
+          if [[ "${{ github.ref }}" == "refs/heads/dev" ]] \
+            || [[ "${{ github.ref }}" == "refs/heads/master" ]] \
+            || [[ "${{ github.ref }}" == "refs/heads/rc" ]] \
+            || [[ "${{ steps.core.outputs.any }}" == "true" ]] \
+            || [[ "${{ github.event.inputs.full }}" == "true" ]] \
+            || [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-full-run') }}" == "true" ]];
           then
             mariadb_groups=${MARIADB_VERSIONS}
             postgresql_groups=${POSTGRESQL_VERSIONS}

@@ -213,19 +197,19 @@ jobs:
             test_full_suite="true"
           fi

-          if [[ "${INPUT_LINT_ONLY}" == "true" ]] \
-            || [[ "${INPUT_PYLINT_ONLY}" == "true" ]] \
-            || [[ "${INPUT_MYPY_ONLY}" == "true" ]] \
-            || [[ "${INPUT_AUDIT_LICENSES_ONLY}" == "true" ]] \
-            || [[ "${GITHUB_EVENT_NAME}" == "push" \
-              && "${REPO_FULL_NAME}" != "home-assistant/core" ]];
+          if [[ "${{ github.event.inputs.lint-only }}" == "true" ]] \
+            || [[ "${{ github.event.inputs.pylint-only }}" == "true" ]] \
+            || [[ "${{ github.event.inputs.mypy-only }}" == "true" ]] \
+            || [[ "${{ github.event.inputs.audit-licenses-only }}" == "true" ]] \
+            || [[ "${{ github.event_name }}" == "push" \
+              && "${{ github.event.repository.full_name }}" != "home-assistant/core" ]];
           then
             lint_only="true"
             skip_coverage="true"
           fi

-          if [[ "${INPUT_SKIP_COVERAGE}" == "true" ]] \
-            || [[ "${HAS_CI_SKIP_COVERAGE_LABEL}" == "true" ]];
+          if [[ "${{ github.event.inputs.skip-coverage }}" == "true" ]] \
+            || [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-skip-coverage') }}" == "true" ]];
           then
             skip_coverage="true"
           fi

@@ -257,8 +241,6 @@ jobs:
   prek:
     name: Run prek checks
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs: [info]
     if: |
       github.event.inputs.pylint-only != 'true'

@@ -278,34 +260,12 @@ jobs:
       - name: Run prek
         uses: j178/prek-action@0bb87d7f00b0c99306c8bcb8b8beba1eb581c037 # v1.1.1
         env:
-          PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config,zizmor
+          PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config
           RUFF_OUTPUT_FORMAT: github

-  zizmor:
-    name: Check GitHub Actions workflows
-    runs-on: ubuntu-24.04
-    permissions:
-      contents: read # To check out the repository
-    needs: [info]
-    if: |
-      github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
-        with:
-          persist-credentials: false
-      - name: Run zizmor
-        uses: j178/prek-action@0bb87d7f00b0c99306c8bcb8b8beba1eb581c037 # v1.1.1
-        with:
-          extra-args: --all-files zizmor
-
   lint-hadolint:
     name: Check ${{ matrix.file }}
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs: [info]
     if: |
       github.event.inputs.pylint-only != 'true'

@@ -327,15 +287,13 @@ jobs:
         run: |
           echo "::add-matcher::.github/workflows/matchers/hadolint.json"
       - name: Check ${{ matrix.file }}
-        uses: docker://hadolint/hadolint:v2.12.0@sha256:30a8fd2e785ab6176eed53f74769e04f125afb2f74a6c52aef7d463583b6d45e
+        uses: docker://hadolint/hadolint:v2.12.0
         with:
           args: hadolint ${{ matrix.file }}

   base:
     name: Prepare dependencies
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs: [info]
     timeout-minutes: 60
     strategy:

@@ -357,7 +315,8 @@ jobs:
         run: |
           uv_version=$(cat requirements.txt | grep uv | cut -d '=' -f 3)
           echo "version=${uv_version}" >> $GITHUB_OUTPUT
-          echo "key=uv-${UV_CACHE_VERSION}-${uv_version}-${HA_SHORT_VERSION}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
+          echo "key=uv-${{ env.UV_CACHE_VERSION }}-${uv_version}-${{
+            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
         uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3

@@ -393,21 +352,19 @@ jobs:
           steps.cache-venv.outputs.cache-hit != 'true'
           || steps.cache-apt-check.outputs.cache-hit != 'true'
         timeout-minutes: 10
-        env:
-          APT_CACHE_HIT: ${{ steps.cache-apt-check.outputs.cache-hit }}
         run: |
           sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-          if [[ "${APT_CACHE_HIT}" != 'true' ]]; then
-            mkdir -p ${APT_CACHE_DIR}
-            mkdir -p ${APT_LIST_CACHE_DIR}
+          if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then
+            mkdir -p ${{ env.APT_CACHE_DIR }}
+            mkdir -p ${{ env.APT_LIST_CACHE_DIR }}
           fi

           sudo apt-get update \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR}
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
           sudo apt-get -y install \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
             bluez \
             ffmpeg \
             libturbojpeg \

@@ -421,8 +378,8 @@ jobs:
             libswscale-dev \
             libudev-dev

-          if [[ "${APT_CACHE_HIT}" != 'true' ]]; then
-            sudo chmod -R 755 ${APT_CACHE_BASE}
+          if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then
+            sudo chmod -R 755 ${{ env.APT_CACHE_BASE }}
           fi
       - name: Save apt cache
         if: steps.cache-apt-check.outputs.cache-hit != 'true'

@@ -469,8 +426,6 @@ jobs:
   hassfest:
     name: Check hassfest
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs:
       - info
       - base

@@ -493,11 +448,11 @@ jobs:
         run: |
           sudo rm /etc/apt/sources.list.d/microsoft-prod.list
           sudo apt-get update \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR}
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
           sudo apt-get -y install \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
             libturbojpeg
       - name: Check out code from GitHub
         uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

@@ -526,8 +481,6 @@ jobs:
   gen-requirements-all:
     name: Check all requirements
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs:
       - info
       - base

@@ -563,8 +516,6 @@ jobs:
   gen-copilot-instructions:
     name: Check copilot instructions
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs:
       - info
     if: |

@@ -589,8 +540,6 @@ jobs:
   dependency-review:
     name: Dependency review
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs:
       - info
       - base

@@ -612,8 +561,6 @@ jobs:
   audit-licenses:
     name: Audit licenses
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs:
       - info
       - base

@@ -647,28 +594,22 @@ jobs:
             ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
               needs.info.outputs.python_cache_key }}
       - name: Extract license data
-        env:
-          PYTHON_VERSION: ${{ matrix.python-version }}
         run: |
           . venv/bin/activate
-          python -m script.licenses extract --output-file=licenses-${PYTHON_VERSION}.json
+          python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
       - name: Upload licenses
         uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
         with:
           name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
           path: licenses-${{ matrix.python-version }}.json
       - name: Check licenses
-        env:
-          PYTHON_VERSION: ${{ matrix.python-version }}
         run: |
           . venv/bin/activate
-          python -m script.licenses check licenses-${PYTHON_VERSION}.json
+          python -m script.licenses check licenses-${{ matrix.python-version }}.json

   pylint:
     name: Check pylint
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs:
       - info
       - base

@@ -709,18 +650,14 @@ jobs:
       - name: Run pylint (partially)
         if: needs.info.outputs.test_full_suite == 'false'
         shell: bash
-        env:
-          INTEGRATIONS_GLOB: ${{ needs.info.outputs.integrations_glob }}
         run: |
           . venv/bin/activate
           python --version
-          pylint --ignore-missing-annotations=y $(printf "homeassistant/components/%s " ${INTEGRATIONS_GLOB})
+          pylint --ignore-missing-annotations=y homeassistant/components/${{ needs.info.outputs.integrations_glob }}

   pylint-tests:
     name: Check pylint on tests
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs:
       - info
       - base

@@ -762,18 +699,14 @@ jobs:
       - name: Run pylint (partially)
         if: needs.info.outputs.test_full_suite == 'false'
         shell: bash
-        env:
-          TESTS_GLOB: ${{ needs.info.outputs.tests_glob }}
         run: |
           . venv/bin/activate
           python --version
-          pylint $(printf "tests/components/%s " ${TESTS_GLOB})
+          pylint tests/components/${{ needs.info.outputs.tests_glob }}

   mypy:
     name: Check mypy
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs:
       - info
       - base

@@ -796,8 +729,9 @@ jobs:
         id: generate-mypy-key
         run: |
           mypy_version=$(cat requirements_test.txt | grep 'mypy.*=' | cut -d '=' -f 3)
-          echo "version=${mypy_version}" >> $GITHUB_OUTPUT
-          echo "key=mypy-${MYPY_CACHE_VERSION}-${mypy_version}-${HA_SHORT_VERSION}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
+          echo "version=$mypy_version" >> $GITHUB_OUTPUT
+          echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-${{
+            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
         uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3

@@ -830,18 +764,14 @@ jobs:
       - name: Run mypy (partially)
         if: needs.info.outputs.test_full_suite == 'false'
         shell: bash
-        env:
-          INTEGRATIONS_GLOB: ${{ needs.info.outputs.integrations_glob }}
         run: |
           . venv/bin/activate
           python --version
-          mypy $(printf "homeassistant/components/%s " ${INTEGRATIONS_GLOB})
+          mypy homeassistant/components/${{ needs.info.outputs.integrations_glob }}

   prepare-pytest-full:
     name: Split tests for full run
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     if: |
       needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.test_full_suite == 'true'

@@ -867,11 +797,11 @@ jobs:
         run: |
           sudo rm /etc/apt/sources.list.d/microsoft-prod.list
           sudo apt-get update \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR}
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
           sudo apt-get -y install \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
             bluez \
             ffmpeg \
             libturbojpeg

@@ -895,11 +825,9 @@ jobs:
             ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
               needs.info.outputs.python_cache_key }}
       - name: Run split_tests.py
-        env:
-          TEST_GROUP_COUNT: ${{ needs.info.outputs.test_group_count }}
         run: |
           . venv/bin/activate
-          python -m script.split_tests ${TEST_GROUP_COUNT} tests
+          python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
       - name: Upload pytest_buckets
         uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
         with:

@@ -910,8 +838,6 @@ jobs:
   pytest-full:
     name: Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs:
       - info
       - base

@@ -943,11 +869,11 @@ jobs:
         run: |
           sudo rm /etc/apt/sources.list.d/microsoft-prod.list
           sudo apt-get update \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR}
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
           sudo apt-get -y install \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
             bluez \
             ffmpeg \
             libturbojpeg \

@@ -990,21 +916,18 @@ jobs:
         id: pytest-full
         env:
           PYTHONDONTWRITEBYTECODE: 1
-          SKIP_COVERAGE: ${{ needs.info.outputs.skip_coverage }}
-          TEST_GROUP: ${{ matrix.group }}
-          PYTHON_VERSION: ${{ matrix.python-version }}
         run: |
           . venv/bin/activate
           python --version
           set -o pipefail
           cov_params=()
-          if [[ "${SKIP_COVERAGE}" != "true" ]]; then
+          if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
             cov_params+=(--cov="homeassistant")
             cov_params+=(--cov-report=xml)
             cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

-          echo "Test group ${TEST_GROUP}: $(sed -n "${TEST_GROUP},1p" pytest_buckets.txt)"
+          echo "Test group ${{ matrix.group }}: $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt)"
           python3 -b -X dev -m pytest \
             -qq \
             --timeout=9 \

@@ -1016,8 +939,8 @@ jobs:
             -o console_output_style=count \
             -p no:sugar \
             --exclude-warning-annotations \
-            $(sed -n "${TEST_GROUP},1p" pytest_buckets.txt) \
-            2>&1 | tee pytest-${PYTHON_VERSION}-${TEST_GROUP}.txt
+            $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt) \
+            2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-full.conclusion == 'failure'
         uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0

@@ -1053,11 +976,9 @@ jobs:
   pytest-mariadb:
     name: Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     services:
       mariadb:
-        image: ${{ matrix.mariadb-group }} # zizmor: ignore[unpinned-images]
+        image: ${{ matrix.mariadb-group }}
         ports:
           - 3306:3306
         env:

@@ -1093,11 +1014,11 @@ jobs:
         run: |
           sudo rm /etc/apt/sources.list.d/microsoft-prod.list
           sudo apt-get update \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR}
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
           sudo apt-get -y install \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
             bluez \
             ffmpeg \
             libturbojpeg \

@@ -1142,17 +1063,14 @@ jobs:
         shell: bash
         env:
           PYTHONDONTWRITEBYTECODE: 1
-          MARIADB_GROUP: ${{ matrix.mariadb-group }}
-          SKIP_COVERAGE: ${{ needs.info.outputs.skip_coverage }}
-          PYTHON_VERSION: ${{ matrix.python-version }}
         run: |
           . venv/bin/activate
           python --version
           set -o pipefail
-          mariadb=$(echo "${MARIADB_GROUP}" | sed "s/:/-/g")
+          mariadb=$(echo "${{ matrix.mariadb-group }}" | sed "s/:/-/g")
           echo "mariadb=${mariadb}" >> $GITHUB_OUTPUT
           cov_params=()
-          if [[ "${SKIP_COVERAGE}" != "true" ]]; then
+          if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
             cov_params+=(--cov="homeassistant.components.recorder")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)

@@ -1174,7 +1092,7 @@ jobs:
             tests/components/logbook \
             tests/components/recorder \
             tests/components/sensor \
-            2>&1 | tee pytest-${PYTHON_VERSION}-${mariadb}.txt
+            2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
         uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0

@@ -1211,11 +1129,9 @@ jobs:
   pytest-postgres:
     name: Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     services:
       postgres:
-        image: ${{ matrix.postgresql-group }} # zizmor: ignore[unpinned-images]
+        image: ${{ matrix.postgresql-group }}
         ports:
           - 5432:5432
         env:

@@ -1251,11 +1167,11 @@ jobs:
         run: |
           sudo rm /etc/apt/sources.list.d/microsoft-prod.list
           sudo apt-get update \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR}
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
           sudo apt-get -y install \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
             bluez \
             ffmpeg \
             libturbojpeg \

@@ -1302,17 +1218,14 @@ jobs:
         shell: bash
         env:
           PYTHONDONTWRITEBYTECODE: 1
-          POSTGRESQL_GROUP: ${{ matrix.postgresql-group }}
-          SKIP_COVERAGE: ${{ needs.info.outputs.skip_coverage }}
-          PYTHON_VERSION: ${{ matrix.python-version }}
         run: |
           . venv/bin/activate
           python --version
           set -o pipefail
-          postgresql=$(echo "${POSTGRESQL_GROUP}" | sed "s/:/-/g")
+          postgresql=$(echo "${{ matrix.postgresql-group }}" | sed "s/:/-/g")
           echo "postgresql=${postgresql}" >> $GITHUB_OUTPUT
           cov_params=()
-          if [[ "${SKIP_COVERAGE}" != "true" ]]; then
+          if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
             cov_params+=(--cov="homeassistant.components.recorder")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)

@@ -1335,7 +1248,7 @@ jobs:
             tests/components/logbook \
             tests/components/recorder \
             tests/components/sensor \
-            2>&1 | tee pytest-${PYTHON_VERSION}-${postgresql}.txt
+            2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
         uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0

@@ -1372,8 +1285,6 @@ jobs:
   coverage-full:
     name: Upload test coverage to Codecov (full suite)
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs:
       - info
       - pytest-full

@@ -1401,8 +1312,6 @@ jobs:
   pytest-partial:
     name: Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     needs:
       - info
       - base

@@ -1434,11 +1343,11 @@ jobs:
         run: |
           sudo rm /etc/apt/sources.list.d/microsoft-prod.list
           sudo apt-get update \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR}
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
           sudo apt-get -y install \
-            -o Dir::Cache=${APT_CACHE_DIR} \
-            -o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
             bluez \
             ffmpeg \
             libturbojpeg \

@@ -1478,22 +1387,19 @@ jobs:
         shell: bash
         env:
           PYTHONDONTWRITEBYTECODE: 1
-          TEST_GROUP: ${{ matrix.group }}
-          SKIP_COVERAGE: ${{ needs.info.outputs.skip_coverage }}
-          PYTHON_VERSION: ${{ matrix.python-version }}
         run: |
           . venv/bin/activate
           python --version
           set -o pipefail

-          if [[ ! -f "tests/components/${TEST_GROUP}/__init__.py" ]]; then
-            echo "::error:: missing file tests/components/${TEST_GROUP}/__init__.py"
+          if [[ ! -f "tests/components/${{ matrix.group }}/__init__.py" ]]; then
+            echo "::error:: missing file tests/components/${{ matrix.group }}/__init__.py"
             exit 1
           fi

           cov_params=()
-          if [[ "${SKIP_COVERAGE}" != "true" ]]; then
-            cov_params+=(--cov="homeassistant.components.${TEST_GROUP}")
+          if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
+            cov_params+=(--cov="homeassistant.components.${{ matrix.group }}")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)
             cov_params+=(--junitxml=junit.xml -o junit_family=legacy)

@@ -1510,8 +1416,8 @@ jobs:
             --durations-min=1 \
             -p no:sugar \
             --exclude-warning-annotations \
-            tests/components/${TEST_GROUP} \
-            2>&1 | tee pytest-${PYTHON_VERSION}-${TEST_GROUP}.txt
+            tests/components/${{ matrix.group }} \
+            2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
         uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0

@@ -1546,8 +1452,6 @@ jobs:
     name: Upload test coverage to Codecov (partial suite)
     if: needs.info.outputs.skip_coverage != 'true'
     runs-on: ubuntu-24.04
-    permissions:
-      contents: read
     timeout-minutes: 10
     needs:
       - info

@@ -1579,7 +1483,7 @@ jobs:
       - pytest-mariadb
     timeout-minutes: 10
     permissions:
-      id-token: write # For Codecov OIDC upload
+      id-token: write
     # codecov/test-results-action currently doesn't support tokenless uploads
     # therefore we can't run it on forks
     if: |
||||
12
.github/workflows/codeql.yml
vendored
12
.github/workflows/codeql.yml
vendored
@@ -5,8 +5,6 @@ on:
|
||||
schedule:
|
||||
- cron: "30 18 * * 4"
|
||||
|
||||
permissions: {}
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
@@ -17,9 +15,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 360
|
||||
permissions:
|
||||
actions: read # To read workflow information for CodeQL
|
||||
contents: read # To check out the repository
|
||||
security-events: write # To upload CodeQL results
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
@@ -28,11 +26,11 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3
|
||||
uses: github/codeql-action/init@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3
|
||||
uses: github/codeql-action/analyze@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
||||
.github/workflows/detect-duplicate-issues.yml (11 changes)

@@ -5,18 +5,13 @@ on:
   issues:
     types: [labeled]

-permissions: {}
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.issue.number }}
+permissions:
+  issues: write
+  models: read

 jobs:
   detect-duplicates:
     name: Detect duplicate issues
     runs-on: ubuntu-latest
-    permissions:
-      issues: write # To comment on and label issues
-      models: read # For AI-based duplicate detection
-
     steps:
       - name: Check if integration label was added and extract details
.github/workflows/detect-non-english-issues.yml (11 changes)

@@ -5,18 +5,13 @@ on:
   issues:
     types: [opened]

-permissions: {}
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.issue.number }}
+permissions:
+  issues: write
+  models: read

 jobs:
   detect-language:
     name: Detect non-English issues
     runs-on: ubuntu-latest
-    permissions:
-      issues: write # To comment on, label, and close issues
-      models: read # For AI-based language detection
-
     steps:
       - name: Check issue language
.github/workflows/lock.yml (10 changes)

@@ -5,20 +5,10 @@ on:
   schedule:
     - cron: "0 * * * *"
-
-permissions: {}
-
-concurrency:
-  group: ${{ github.workflow }}
-  cancel-in-progress: true
-
 jobs:
   lock:
     name: Lock inactive threads
     if: github.repository_owner == 'home-assistant'
     runs-on: ubuntu-latest
-    permissions:
-      issues: write # To lock issues
-      pull-requests: write # To lock pull requests
     steps:
       - uses: dessant/lock-threads@7266a7ce5c1df01b1c6db85bf8cd86c737dadbe7 # v6.0.0
         with:
.github/workflows/restrict-task-creation.yml (30 changes)

@@ -5,39 +5,9 @@ on:
   issues:
     types: [opened]
-
-permissions: {}
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.issue.number }}
-
 jobs:
-  add-no-stale:
-    name: Add no-stale label
-    runs-on: ubuntu-latest
-    permissions:
-      issues: write # To add labels to issues
-    if: >-
-      github.event.issue.type.name == 'Task'
-      || github.event.issue.type.name == 'Epic'
-      || github.event.issue.type.name == 'Opportunity'
-    steps:
-      - name: Add no-stale label
-        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
-        with:
-          script: |
-            await github.rest.issues.addLabels({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              issue_number: context.issue.number,
-              labels: ['no-stale']
-            });
-
   check-authorization:
     name: Check authorization
     runs-on: ubuntu-latest
-    permissions:
-      contents: read # To read CODEOWNERS file
-      issues: write # To comment on, label, and close issues
     # Only run if this is a Task issue type (from the issue form)
     if: github.event.issue.type.name == 'Task'
     steps:
.github/workflows/stale.yml (10 changes)

@@ -6,20 +6,10 @@ on:
     - cron: "0 * * * *"
   workflow_dispatch:
-
-permissions: {}
-
-concurrency:
-  group: ${{ github.workflow }}
-  cancel-in-progress: true
-
 jobs:
   stale:
     name: Mark stale issues and PRs
     if: github.repository_owner == 'home-assistant'
     runs-on: ubuntu-latest
-    permissions:
-      issues: write # To label and close stale issues
-      pull-requests: write # To label and close stale PRs
     steps:
       # The 60 day stale policy for PRs
       # Used for:
.github/workflows/translations.yml (9 changes)

@@ -9,12 +9,6 @@ on:
     paths:
       - "**strings.json"

-permissions: {}
-
-concurrency:
-  group: ${{ github.workflow }}
-  cancel-in-progress: true
-
 env:
   DEFAULT_PYTHON: "3.14.2"

@@ -35,7 +29,6 @@ jobs:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Upload Translations
-        env:
-          LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
         run: |
+          export LOKALISE_TOKEN="${{ secrets.LOKALISE_TOKEN }}"
           python3 -m script.translations upload
.github/workflows/wheels.yml (4 changes)

@@ -19,8 +19,6 @@ on:
 env:
   DEFAULT_PYTHON: "3.14.2"

-permissions: {}
-
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref_name}}
   cancel-in-progress: true

@@ -53,7 +51,7 @@ jobs:
       - name: Create requirements_diff file
         run: |
-          if [[ "${GITHUB_EVENT_NAME}" =~ (schedule|workflow_dispatch) ]]; then
+          if [[ ${{ github.event_name }} =~ (schedule|workflow_dispatch) ]]; then
            touch requirements_diff.txt
          else
            curl -s -o requirements_diff.txt https://raw.githubusercontent.com/home-assistant/core/master/requirements.txt
.pre-commit-config.yaml

@@ -17,12 +17,6 @@ repos:
           - --quiet-level=2
         exclude_types: [csv, json, html]
         exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
-  - repo: https://github.com/zizmorcore/zizmor-pre-commit
-    rev: v1.22.0
-    hooks:
-      - id: zizmor
-        args:
-          - --pedantic
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v6.0.0
     hooks:
.strict-typing

@@ -496,7 +496,6 @@ homeassistant.components.smtp.*
 homeassistant.components.snooz.*
 homeassistant.components.solarlog.*
 homeassistant.components.sonarr.*
-homeassistant.components.spaceapi.*
 homeassistant.components.speedtestdotnet.*
 homeassistant.components.spotify.*
 homeassistant.components.sql.*
CODEOWNERS (2 changes, generated)

@@ -1068,8 +1068,6 @@ build.json @home-assistant/supervisor
 /homeassistant/components/mqtt/ @emontnemery @jbouwh @bdraco
 /tests/components/mqtt/ @emontnemery @jbouwh @bdraco
 /homeassistant/components/msteams/ @peroyvind
-/homeassistant/components/mta/ @OnFreund
-/tests/components/mta/ @OnFreund
 /homeassistant/components/mullvad/ @meichthys
 /tests/components/mullvad/ @meichthys
 /homeassistant/components/music_assistant/ @music-assistant @arturpragacz
homeassistant/components/advantage_air/quality_scale.yaml (deleted, 108 lines)

@@ -1,108 +0,0 @@
-rules:
-  # Bronze
-  action-setup:
-    status: todo
-    comment: https://developers.home-assistant.io/blog/2025/09/25/entity-services-api-changes/
-  appropriate-polling: done
-  brands: done
-  common-modules:
-    status: todo
-    comment: |
-      Move coordinator from __init__.py to coordinator.py.
-      Consider using entity descriptions for binary_sensor and switch.
-      Consider simplifying climate supported features flow.
-  config-flow-test-coverage:
-    status: todo
-    comment: |
-      Add mock_setup_entry common fixture.
-      Test unique_id of the entry in happy flow.
-      Split duplicate entry test from happy flow, use mock_config_entry.
-      Error flow should end in CREATE_ENTRY to test recovery.
-      Add data_description for ip_address (and port) to strings.json - tests fail with:
-      "Translation not found for advantage_air: config.step.user.data_description.ip_address"
-  config-flow:
-    status: todo
-    comment: Data descriptions missing
-  dependency-transparency: done
-  docs-actions: done
-  docs-high-level-description: done
-  docs-installation-instructions: todo
-  docs-removal-instructions: todo
-  entity-event-setup:
-    status: exempt
-    comment: Entities do not explicitly subscribe to events.
-  entity-unique-id: done
-  has-entity-name: done
-  runtime-data:
-    status: done
-    comment: Consider extending coordinator to access API via coordinator and remove extra dataclass.
-  test-before-configure: done
-  test-before-setup: done
-  unique-config-entry: done
-
-  # Silver
-  action-exceptions: done
-  config-entry-unloading: done
-  docs-configuration-parameters:
-    status: exempt
-    comment: No options to be set.
-  docs-installation-parameters: done
-  entity-unavailable:
-    status: todo
-    comment: MyZone temp entity should be unavailable when MyZone is disabled rather than returning None.
-  integration-owner: done
-  log-when-unavailable: todo
-  parallel-updates: todo
-  reauthentication-flow:
-    status: exempt
-    comment: Integration connects to local device without authentication.
-  test-coverage:
-    status: todo
-    comment: |
-      Patch the library instead of mocking at integration level.
-      Split binary sensor tests into multiple tests (enable entities etc).
-      Split tests into Creation (right entities with right values), Actions (right library calls), and Other behaviors.
-
-  # Gold
-  devices:
-    status: todo
-    comment: Consider making every zone its own device for better naming and room assignment. Breaking change to split cover entities to separate devices.
-  diagnostics: done
-  discovery-update-info:
-    status: exempt
-    comment: Device is a generic Android device (android-xxxxxxxx) indistinguishable from other Android devices, not discoverable.
-  discovery:
-    status: exempt
-    comment: Check mDNS, DHCP, SSDP confirmed not feasible. Device is a generic Android device (android-xxxxxxxx) indistinguishable from other Android devices.
-  docs-data-update: todo
-  docs-examples: todo
-  docs-known-limitations: todo
-  docs-supported-devices: todo
-  docs-supported-functions: done
-  docs-troubleshooting: todo
-  docs-use-cases: todo
-  dynamic-devices:
-    status: exempt
-    comment: AC zones are static per unit and configured on the device itself.
-  entity-category: done
-  entity-device-class:
-    status: todo
-    comment: Consider using UPDATE device class for app update binary sensor instead of custom.
-  entity-disabled-by-default: done
-  entity-translations: todo
-  exception-translations:
-    status: todo
-    comment: UpdateFailed in the coordinator
-  icon-translations: todo
-  reconfiguration-flow: todo
-  repair-issues:
-    status: exempt
-    comment: Integration does not raise repair issues.
-  stale-devices:
-    status: exempt
-    comment: Zones are part of the AC unit, not separate removable devices.
-
-  # Platinum
-  async-dependency: done
-  inject-websession: done
-  strict-typing: todo
homeassistant/components/aosmith/water_heater.py

@@ -120,7 +120,7 @@ class AOSmithWaterHeaterEntity(AOSmithStatusEntity, WaterHeaterEntity):
         return MODE_AOSMITH_TO_HA.get(self.device.status.current_mode, STATE_OFF)

     @property
-    def is_away_mode_on(self) -> bool:
+    def is_away_mode_on(self):
         """Return True if away mode is on."""
         return self.device.status.current_mode == AOSmithOperationMode.VACATION
homeassistant/components/atag/water_heater.py

@@ -37,15 +37,15 @@ class AtagWaterHeater(AtagEntity, WaterHeaterEntity):
     _attr_temperature_unit = UnitOfTemperature.CELSIUS

     @property
-    def current_temperature(self) -> float:
+    def current_temperature(self):
         """Return the current temperature."""
         return self.coordinator.atag.dhw.temperature

     @property
-    def current_operation(self) -> str:
+    def current_operation(self):
         """Return current operation."""
         operation = self.coordinator.atag.dhw.current_operation
-        return operation if operation in OPERATION_LIST else STATE_OFF
+        return operation if operation in self.operation_list else STATE_OFF

     async def async_set_temperature(self, **kwargs: Any) -> None:
         """Set new target temperature."""

@@ -53,7 +53,7 @@ class AtagWaterHeater(AtagEntity, WaterHeaterEntity):
         self.async_write_ha_state()

     @property
-    def target_temperature(self) -> float:
+    def target_temperature(self):
         """Return the setpoint if water demand, otherwise return base temp (comfort level)."""
         return self.coordinator.atag.dhw.target_temperature
homeassistant/components/cloud/manifest.json

@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==1.15.0", "openai==2.21.0"],
+  "requirements": ["hass-nabucasa==1.13.0", "openai==2.21.0"],
   "single_config_entry": true
 }
@@ -5,7 +5,7 @@ import functools
import json
import logging
from time import time
from typing import Any, cast
from typing import Any

from botocore.exceptions import BotoCoreError

@@ -190,77 +190,58 @@ class R2BackupAgent(BackupAgent):
:param open_stream: A function returning an async iterator that yields bytes.
"""
_LOGGER.debug("Starting multipart upload for %s", tar_filename)
key = self._with_prefix(tar_filename)
multipart_upload = await self._client.create_multipart_upload(
Bucket=self._bucket,
Key=key,
Key=self._with_prefix(tar_filename),
)
upload_id = multipart_upload["UploadId"]
try:
parts: list[dict[str, Any]] = []
part_number = 1
buffer = bytearray()  # bytes buffer to store the data
offset = 0  # start index of unread data inside buffer

stream = await open_stream()
async for chunk in stream:
buffer.extend(chunk)

# Upload parts of exactly MULTIPART_MIN_PART_SIZE_BYTES to ensure
# all non-trailing parts have the same size (defensive implementation)
view = memoryview(buffer)
try:
while len(buffer) - offset >= MULTIPART_MIN_PART_SIZE_BYTES:
start = offset
end = offset + MULTIPART_MIN_PART_SIZE_BYTES
part_data = view[start:end]
offset = end
# upload parts of exactly MULTIPART_MIN_PART_SIZE_BYTES to ensure
# all non-trailing parts have the same size (required by S3/R2)
while len(buffer) >= MULTIPART_MIN_PART_SIZE_BYTES:
part_data = bytes(buffer[:MULTIPART_MIN_PART_SIZE_BYTES])
del buffer[:MULTIPART_MIN_PART_SIZE_BYTES]

_LOGGER.debug(
"Uploading part number %d, size %d",
part_number,
len(part_data),
)
part = await cast(Any, self._client).upload_part(
Bucket=self._bucket,
Key=key,
PartNumber=part_number,
UploadId=upload_id,
Body=part_data.tobytes(),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
part_number += 1
finally:
view.release()

# Compact the buffer if the consumed offset has grown large enough. This
# avoids unnecessary memory copies when compacting after every part upload.
if offset and offset >= MULTIPART_MIN_PART_SIZE_BYTES:
buffer = bytearray(buffer[offset:])
offset = 0
_LOGGER.debug(
"Uploading part number %d, size %d",
part_number,
len(part_data),
)
part = await self._client.upload_part(
Bucket=self._bucket,
Key=self._with_prefix(tar_filename),
PartNumber=part_number,
UploadId=upload_id,
Body=part_data,
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
part_number += 1

# Upload the final buffer as the last part (no minimum size requirement)
# Offset should be 0 after the last compaction, but we use it as the start
# index to be defensive in case the buffer was not compacted.
if offset < len(buffer):
remaining_data = memoryview(buffer)[offset:]
if buffer:
_LOGGER.debug(
"Uploading final part number %d, size %d",
part_number,
len(remaining_data),
"Uploading final part number %d, size %d", part_number, len(buffer)
)
part = await cast(Any, self._client).upload_part(
part = await self._client.upload_part(
Bucket=self._bucket,
Key=key,
Key=self._with_prefix(tar_filename),
PartNumber=part_number,
UploadId=upload_id,
Body=remaining_data.tobytes(),
Body=bytes(buffer),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})

await cast(Any, self._client).complete_multipart_upload(
await self._client.complete_multipart_upload(
Bucket=self._bucket,
Key=key,
Key=self._with_prefix(tar_filename),
UploadId=upload_id,
MultipartUpload={"Parts": parts},
)
@@ -269,7 +250,7 @@ class R2BackupAgent(BackupAgent):
try:
await self._client.abort_multipart_upload(
Bucket=self._bucket,
Key=key,
Key=self._with_prefix(tar_filename),
UploadId=upload_id,
)
except BotoCoreError:

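The buffer handling in this hunk can be exercised on its own: fixed-size parts are sliced out of a growing bytearray through a memoryview plus an offset, and the buffer is only compacted once enough bytes have been consumed to make the copy worthwhile. A minimal sketch of that technique, with an illustrative 5-byte minimum in place of the real MULTIPART_MIN_PART_SIZE_BYTES:

# Sketch of the part-slicing strategy above; MIN_PART is an illustrative
# stand-in for MULTIPART_MIN_PART_SIZE_BYTES.
MIN_PART = 5

def split_parts(chunks):
    """Yield equal-sized parts plus one trailing part of any size."""
    buffer = bytearray()
    offset = 0  # start index of unconsumed data inside buffer
    for chunk in chunks:
        buffer.extend(chunk)
        with memoryview(buffer) as view:
            while len(buffer) - offset >= MIN_PART:
                yield bytes(view[offset : offset + MIN_PART])
                offset += MIN_PART
        # Compact only when enough was consumed, as in the hunk above
        if offset >= MIN_PART:
            buffer = bytearray(buffer[offset:])
            offset = 0
    if offset < len(buffer):  # trailing part has no minimum size
        yield bytes(buffer[offset:])

assert list(split_parts([b"abc", b"defgh", b"ij"])) == [b"abcde", b"fghij"]
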
@@ -34,33 +34,20 @@ CONTROL4_CATEGORY = "comfort"
# Control4 variable names
CONTROL4_HVAC_STATE = "HVAC_STATE"
CONTROL4_HVAC_MODE = "HVAC_MODE"
CONTROL4_CURRENT_TEMPERATURE = "TEMPERATURE_F"
CONTROL4_HUMIDITY = "HUMIDITY"
CONTROL4_SCALE = "SCALE"  # "FAHRENHEIT" or "CELSIUS"

# Temperature variables - Fahrenheit
CONTROL4_CURRENT_TEMPERATURE_F = "TEMPERATURE_F"
CONTROL4_COOL_SETPOINT_F = "COOL_SETPOINT_F"
CONTROL4_HEAT_SETPOINT_F = "HEAT_SETPOINT_F"

# Temperature variables - Celsius
CONTROL4_CURRENT_TEMPERATURE_C = "TEMPERATURE_C"
CONTROL4_COOL_SETPOINT_C = "COOL_SETPOINT_C"
CONTROL4_HEAT_SETPOINT_C = "HEAT_SETPOINT_C"

CONTROL4_COOL_SETPOINT = "COOL_SETPOINT_F"
CONTROL4_HEAT_SETPOINT = "HEAT_SETPOINT_F"
CONTROL4_FAN_MODE = "FAN_MODE"
CONTROL4_FAN_MODES_LIST = "FAN_MODES_LIST"

VARIABLES_OF_INTEREST = {
CONTROL4_HVAC_STATE,
CONTROL4_HVAC_MODE,
CONTROL4_CURRENT_TEMPERATURE,
CONTROL4_HUMIDITY,
CONTROL4_CURRENT_TEMPERATURE_F,
CONTROL4_CURRENT_TEMPERATURE_C,
CONTROL4_COOL_SETPOINT_F,
CONTROL4_HEAT_SETPOINT_F,
CONTROL4_COOL_SETPOINT_C,
CONTROL4_HEAT_SETPOINT_C,
CONTROL4_SCALE,
CONTROL4_COOL_SETPOINT,
CONTROL4_HEAT_SETPOINT,
CONTROL4_FAN_MODE,
CONTROL4_FAN_MODES_LIST,
}
@@ -169,6 +156,7 @@ class Control4Climate(Control4Entity, ClimateEntity):
"""Control4 climate entity."""

_attr_has_entity_name = True
_attr_temperature_unit = UnitOfTemperature.FAHRENHEIT
_attr_translation_key = "thermostat"
_attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT, HVACMode.COOL, HVACMode.HEAT_COOL]

@@ -225,45 +213,13 @@ class Control4Climate(Control4Entity, ClimateEntity):
features |= ClimateEntityFeature.FAN_MODE
return features

@property
def temperature_unit(self) -> str:
"""Return the temperature unit based on the thermostat's SCALE setting."""
data = self._thermostat_data
if data is None:
return UnitOfTemperature.CELSIUS  # Default per HA conventions
if data.get(CONTROL4_SCALE) == "FAHRENHEIT":
return UnitOfTemperature.FAHRENHEIT
return UnitOfTemperature.CELSIUS

@property
def _cool_setpoint(self) -> float | None:
"""Return the cooling setpoint from the appropriate variable."""
data = self._thermostat_data
if data is None:
return None
if self.temperature_unit == UnitOfTemperature.CELSIUS:
return data.get(CONTROL4_COOL_SETPOINT_C)
return data.get(CONTROL4_COOL_SETPOINT_F)

@property
def _heat_setpoint(self) -> float | None:
"""Return the heating setpoint from the appropriate variable."""
data = self._thermostat_data
if data is None:
return None
if self.temperature_unit == UnitOfTemperature.CELSIUS:
return data.get(CONTROL4_HEAT_SETPOINT_C)
return data.get(CONTROL4_HEAT_SETPOINT_F)

@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""
data = self._thermostat_data
if data is None:
return None
if self.temperature_unit == UnitOfTemperature.CELSIUS:
return data.get(CONTROL4_CURRENT_TEMPERATURE_C)
return data.get(CONTROL4_CURRENT_TEMPERATURE_F)
return data.get(CONTROL4_CURRENT_TEMPERATURE)

@property
def current_humidity(self) -> int | None:
@@ -301,25 +257,34 @@ class Control4Climate(Control4Entity, ClimateEntity):
@property
def target_temperature(self) -> float | None:
"""Return the target temperature."""
data = self._thermostat_data
if data is None:
return None
hvac_mode = self.hvac_mode
if hvac_mode == HVACMode.COOL:
return self._cool_setpoint
return data.get(CONTROL4_COOL_SETPOINT)
if hvac_mode == HVACMode.HEAT:
return self._heat_setpoint
return data.get(CONTROL4_HEAT_SETPOINT)
return None

@property
def target_temperature_high(self) -> float | None:
"""Return the high target temperature for auto mode."""
data = self._thermostat_data
if data is None:
return None
if self.hvac_mode == HVACMode.HEAT_COOL:
return self._cool_setpoint
return data.get(CONTROL4_COOL_SETPOINT)
return None

@property
def target_temperature_low(self) -> float | None:
"""Return the low target temperature for auto mode."""
data = self._thermostat_data
if data is None:
return None
if self.hvac_mode == HVACMode.HEAT_COOL:
return self._heat_setpoint
return data.get(CONTROL4_HEAT_SETPOINT)
return None

@property
@@ -361,27 +326,15 @@ class Control4Climate(Control4Entity, ClimateEntity):
# Handle temperature range for auto mode
if self.hvac_mode == HVACMode.HEAT_COOL:
if low_temp is not None:
if self.temperature_unit == UnitOfTemperature.CELSIUS:
await c4_climate.setHeatSetpointC(low_temp)
else:
await c4_climate.setHeatSetpointF(low_temp)
await c4_climate.setHeatSetpointF(low_temp)
if high_temp is not None:
if self.temperature_unit == UnitOfTemperature.CELSIUS:
await c4_climate.setCoolSetpointC(high_temp)
else:
await c4_climate.setCoolSetpointF(high_temp)
await c4_climate.setCoolSetpointF(high_temp)
# Handle single temperature setpoint
elif temp is not None:
if self.hvac_mode == HVACMode.COOL:
if self.temperature_unit == UnitOfTemperature.CELSIUS:
await c4_climate.setCoolSetpointC(temp)
else:
await c4_climate.setCoolSetpointF(temp)
await c4_climate.setCoolSetpointF(temp)
elif self.hvac_mode == HVACMode.HEAT:
if self.temperature_unit == UnitOfTemperature.CELSIUS:
await c4_climate.setHeatSetpointC(temp)
else:
await c4_climate.setHeatSetpointF(temp)
await c4_climate.setHeatSetpointF(temp)

await self.coordinator.async_request_refresh()

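The unit handling in this hunk follows one pattern throughout: read the thermostat's SCALE variable, then pick the matching _C or _F variable. A standalone sketch (the data dict is a made-up sample of Control4 variables):

# Sketch: choose the Control4 variable that matches the active scale.
SCALE = "SCALE"
COOL_SETPOINT_BY_UNIT = {"CELSIUS": "COOL_SETPOINT_C", "FAHRENHEIT": "COOL_SETPOINT_F"}

def cool_setpoint(data: dict) -> float | None:
    """Return the cooling setpoint for the thermostat's configured scale."""
    unit = "FAHRENHEIT" if data.get(SCALE) == "FAHRENHEIT" else "CELSIUS"
    return data.get(COOL_SETPOINT_BY_UNIT[unit])

assert cool_setpoint({"SCALE": "FAHRENHEIT", "COOL_SETPOINT_F": 74.0}) == 74.0
assert cool_setpoint({"SCALE": "CELSIUS", "COOL_SETPOINT_C": 23.5}) == 23.5
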
@@ -598,7 +598,7 @@ class DefaultAgent(ConversationEntity):
error_response_type, error_response_args = _get_match_error_response(
self.hass, match_error
)
intent_response = _make_error_result(
return _make_error_result(
language,
intent.IntentResponseErrorCode.NO_VALID_TARGETS,
self._get_error_text(
@@ -609,7 +609,7 @@ class DefaultAgent(ConversationEntity):
# Intent was valid and entities matched constraints, but an error
# occurred during handling.
_LOGGER.exception("Intent handling error")
intent_response = _make_error_result(
return _make_error_result(
language,
intent.IntentResponseErrorCode.FAILED_TO_HANDLE,
self._get_error_text(
@@ -618,7 +618,7 @@ class DefaultAgent(ConversationEntity):
)
except intent.IntentUnexpectedError:
_LOGGER.exception("Unexpected intent error")
intent_response = _make_error_result(
return _make_error_result(
language,
intent.IntentResponseErrorCode.UNKNOWN,
self._get_error_text(ErrorKey.HANDLE_ERROR, lang_intents),

@@ -2,12 +2,9 @@

from __future__ import annotations

from collections.abc import Sequence
import logging
from typing import Any

from pydaikin.daikin_base import Appliance

from homeassistant.components.climate import (
ATTR_FAN_MODE,
ATTR_HVAC_MODE,
@@ -24,7 +21,6 @@ from homeassistant.components.climate import (
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import (
@@ -33,19 +29,12 @@ from .const import (
ATTR_STATE_OFF,
ATTR_STATE_ON,
ATTR_TARGET_TEMPERATURE,
DOMAIN,
ZONE_NAME_UNCONFIGURED,
)
from .coordinator import DaikinConfigEntry, DaikinCoordinator
from .entity import DaikinEntity

_LOGGER = logging.getLogger(__name__)

type DaikinZone = Sequence[str | int]

DAIKIN_ZONE_TEMP_HEAT = "lztemp_h"
DAIKIN_ZONE_TEMP_COOL = "lztemp_c"


HA_STATE_TO_DAIKIN = {
HVACMode.FAN_ONLY: "fan",
@@ -89,70 +78,6 @@ HA_ATTR_TO_DAIKIN = {
}

DAIKIN_ATTR_ADVANCED = "adv"
ZONE_TEMPERATURE_WINDOW = 2


def _zone_error(
translation_key: str, placeholders: dict[str, str] | None = None
) -> HomeAssistantError:
"""Return a Home Assistant error with Daikin translation info."""
return HomeAssistantError(
translation_domain=DOMAIN,
translation_key=translation_key,
translation_placeholders=placeholders,
)


def _zone_is_configured(zone: DaikinZone) -> bool:
"""Return True if the Daikin zone represents a configured zone."""
if not zone:
return False
return zone[0] != ZONE_NAME_UNCONFIGURED


def _zone_temperature_lists(device: Appliance) -> tuple[list[str], list[str]]:
"""Return the decoded zone temperature lists."""
try:
heating = device.represent(DAIKIN_ZONE_TEMP_HEAT)[1]
cooling = device.represent(DAIKIN_ZONE_TEMP_COOL)[1]
except AttributeError:
return ([], [])
return (list(heating or []), list(cooling or []))


def _supports_zone_temperature_control(device: Appliance) -> bool:
"""Return True if the device exposes zone temperature settings."""
zones = device.zones
if not zones:
return False
heating, cooling = _zone_temperature_lists(device)
return bool(
heating
and cooling
and len(heating) >= len(zones)
and len(cooling) >= len(zones)
)


def _system_target_temperature(device: Appliance) -> float | None:
"""Return the system target temperature when available."""
target = device.target_temperature
if target is None:
return None
try:
return float(target)
except (TypeError, ValueError):
return None


def _zone_temperature_from_list(values: list[str], zone_id: int) -> float | None:
"""Return the parsed temperature for a zone from a Daikin list."""
if zone_id >= len(values):
return None
try:
return float(values[zone_id])
except (TypeError, ValueError):
return None


async def async_setup_entry(
@@ -161,16 +86,8 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Daikin climate based on config_entry."""
coordinator = entry.runtime_data
entities: list[ClimateEntity] = [DaikinClimate(coordinator)]
if _supports_zone_temperature_control(coordinator.device):
zones = coordinator.device.zones or []
entities.extend(
DaikinZoneClimate(coordinator, zone_id)
for zone_id, zone in enumerate(zones)
if _zone_is_configured(zone)
)
async_add_entities(entities)
daikin_api = entry.runtime_data
async_add_entities([DaikinClimate(daikin_api)])


def format_target_temperature(target_temperature: float) -> str:
@@ -367,130 +284,3 @@ class DaikinClimate(DaikinEntity, ClimateEntity):
{HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE]: HA_STATE_TO_DAIKIN[HVACMode.OFF]}
)
await self.coordinator.async_refresh()


class DaikinZoneClimate(DaikinEntity, ClimateEntity):
"""Representation of a Daikin zone temperature controller."""

_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_has_entity_name = True
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
_attr_target_temperature_step = 1

def __init__(self, coordinator: DaikinCoordinator, zone_id: int) -> None:
"""Initialize the zone climate entity."""
super().__init__(coordinator)
self._zone_id = zone_id
self._attr_unique_id = f"{self.device.mac}-zone{zone_id}-temperature"
zone_name = self.device.zones[self._zone_id][0]
self._attr_name = f"{zone_name} temperature"

@property
def hvac_modes(self) -> list[HVACMode]:
"""Return the hvac modes (mirrors the main unit)."""
return [self.hvac_mode]

@property
def hvac_mode(self) -> HVACMode:
"""Return the current HVAC mode."""
daikin_mode = self.device.represent(HA_ATTR_TO_DAIKIN[ATTR_HVAC_MODE])[1]
return DAIKIN_TO_HA_STATE.get(daikin_mode, HVACMode.HEAT_COOL)

@property
def hvac_action(self) -> HVACAction | None:
"""Return the current HVAC action."""
return HA_STATE_TO_CURRENT_HVAC.get(self.hvac_mode)

@property
def target_temperature(self) -> float | None:
"""Return the zone target temperature for the active mode."""
heating, cooling = _zone_temperature_lists(self.device)
mode = self.hvac_mode
if mode == HVACMode.HEAT:
return _zone_temperature_from_list(heating, self._zone_id)
if mode == HVACMode.COOL:
return _zone_temperature_from_list(cooling, self._zone_id)
return None

@property
def min_temp(self) -> float:
"""Return the minimum selectable temperature."""
target = _system_target_temperature(self.device)
if target is None:
return super().min_temp
return target - ZONE_TEMPERATURE_WINDOW

@property
def max_temp(self) -> float:
"""Return the maximum selectable temperature."""
target = _system_target_temperature(self.device)
if target is None:
return super().max_temp
return target + ZONE_TEMPERATURE_WINDOW

@property
def available(self) -> bool:
"""Return if the entity is available."""
return (
super().available
and _supports_zone_temperature_control(self.device)
and _system_target_temperature(self.device) is not None
)

@property
def extra_state_attributes(self) -> dict[str, Any]:
"""Return additional metadata."""
return {"zone_id": self._zone_id}

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set the zone temperature."""
if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="zone_temperature_missing",
)
zones = self.device.zones
if not zones or not _supports_zone_temperature_control(self.device):
raise _zone_error("zone_parameters_unavailable")

try:
zone = zones[self._zone_id]
except (IndexError, TypeError) as err:
raise _zone_error(
"zone_missing",
{
"zone_id": str(self._zone_id),
"max_zone": str(len(zones) - 1),
},
) from err

if not _zone_is_configured(zone):
raise _zone_error("zone_inactive", {"zone_id": str(self._zone_id)})

temperature_value = float(temperature)
target = _system_target_temperature(self.device)
if target is None:
raise _zone_error("zone_parameters_unavailable")

mode = self.hvac_mode
if mode == HVACMode.HEAT:
zone_key = DAIKIN_ZONE_TEMP_HEAT
elif mode == HVACMode.COOL:
zone_key = DAIKIN_ZONE_TEMP_COOL
else:
raise _zone_error("zone_hvac_mode_unsupported")

zone_value = str(round(temperature_value))
try:
await self.device.set_zone(self._zone_id, zone_key, zone_value)
except (AttributeError, KeyError, NotImplementedError, TypeError) as err:
raise _zone_error("zone_set_failed") from err

await self.coordinator.async_request_refresh()

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Disallow changing HVAC mode via zone climate."""
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="zone_hvac_read_only",
)

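The zone helpers above parse pydaikin's lztemp_h / lztemp_c lists defensively, since entries may be missing or non-numeric. A standalone sketch with a made-up list:

# Sketch: defensive parsing of per-zone temperature lists, mirroring the
# helpers above. Real values come from device.represent("lztemp_h") /
# device.represent("lztemp_c").
def zone_temperature(values: list[str], zone_id: int) -> float | None:
    """Return the temperature for zone_id, or None if missing or unparsable."""
    if zone_id >= len(values):
        return None
    try:
        return float(values[zone_id])
    except (TypeError, ValueError):
        return None

heating = ["22", "20", "-", "23"]  # "-" marks an unconfigured zone
assert zone_temperature(heating, 1) == 20.0
assert zone_temperature(heating, 2) is None  # unparsable -> None
assert zone_temperature(heating, 9) is None  # out of range -> None
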
@@ -24,6 +24,4 @@ ATTR_STATE_OFF = "off"
KEY_MAC = "mac"
KEY_IP = "ip"

ZONE_NAME_UNCONFIGURED = "-"

TIMEOUT_SEC = 120

@@ -57,28 +57,5 @@
"name": "Power"
}
}
},
"exceptions": {
"zone_hvac_mode_unsupported": {
"message": "Zone temperature can only be changed when the main climate mode is heat or cool."
},
"zone_hvac_read_only": {
"message": "Zone HVAC mode is controlled by the main climate entity."
},
"zone_inactive": {
"message": "Zone {zone_id} is not active. Enable the zone on your Daikin device first."
},
"zone_missing": {
"message": "Zone {zone_id} does not exist. Available zones are 0-{max_zone}."
},
"zone_parameters_unavailable": {
"message": "This device does not expose the required zone temperature parameters."
},
"zone_set_failed": {
"message": "Failed to set zone temperature. The device may not support this operation."
},
"zone_temperature_missing": {
"message": "Provide a temperature value when adjusting a zone."
}
}
}

@@ -8,7 +8,6 @@ from homeassistant.components.switch import SwitchEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import ZONE_NAME_UNCONFIGURED
from .coordinator import DaikinConfigEntry, DaikinCoordinator
from .entity import DaikinEntity

@@ -29,7 +28,7 @@ async def async_setup_entry(
switches.extend(
DaikinZoneSwitch(daikin_api, zone_id)
for zone_id, zone in enumerate(zones)
if zone[0] != ZONE_NAME_UNCONFIGURED
if zone[0] != "-"
)
if daikin_api.device.support_advanced_modes:
# It isn't possible to find out from the API responses if a specific

@@ -136,12 +136,12 @@ class EcoNetWaterHeater(EcoNetEntity[WaterHeater], WaterHeaterEntity):
return self.water_heater.set_point

@property
def min_temp(self) -> float:
def min_temp(self):
"""Return the minimum temperature."""
return self.water_heater.set_point_limits[0]

@property
def max_temp(self) -> float:
def max_temp(self):
"""Return the maximum temperature."""
return self.water_heater.set_point_limits[1]

@@ -15,7 +15,7 @@ from homeassistant.helpers import config_validation as cv, singleton, storage
from .const import DOMAIN

STORAGE_VERSION = 1
STORAGE_MINOR_VERSION = 3
STORAGE_MINOR_VERSION = 2
STORAGE_KEY = DOMAIN


@@ -92,11 +92,8 @@ class GridPowerSourceType(TypedDict, total=False):
power_config: PowerConfig


class LegacyGridSourceType(TypedDict):
"""Legacy dictionary holding the source of grid energy consumption.

This format is deprecated and will be migrated to GridSourceType.
"""
class GridSourceType(TypedDict):
"""Dictionary holding the source of grid energy consumption."""

type: Literal["grid"]

@@ -107,40 +104,6 @@ class LegacyGridSourceType(TypedDict):
cost_adjustment_day: float


class GridSourceType(TypedDict):
"""Dictionary holding a unified grid connection (like batteries).

Each grid connection represents a single import/export pair with
optional power tracking. Multiple grid sources are allowed.
"""

type: Literal["grid"]

# Import meter - kWh consumed from grid
# Can be None for export-only or power-only grids migrated from legacy format
stat_energy_from: str | None

# Export meter (optional) - kWh returned to grid (solar/battery export)
stat_energy_to: str | None

# Cost tracking for import
stat_cost: str | None  # statistic_id of costs ($) incurred
entity_energy_price: str | None  # entity_id providing price ($/kWh)
number_energy_price: float | None  # Fixed price ($/kWh)

# Compensation tracking for export
stat_compensation: str | None  # statistic_id of compensation ($) received
entity_energy_price_export: str | None  # entity_id providing export price ($/kWh)
number_energy_price_export: float | None  # Fixed export price ($/kWh)

# Power measurement (optional)
# positive when consuming from grid, negative when exporting
stat_rate: NotRequired[str]
power_config: NotRequired[PowerConfig]

cost_adjustment_day: float


class SolarSourceType(TypedDict):
"""Dictionary holding the source of energy production."""

@@ -345,77 +308,23 @@ def _generate_unique_value_validator(key: str) -> Callable[[list[dict]], list[dict]]:
return validate_uniqueness


def _grid_ensure_single_price_import(
val: dict[str, Any],
) -> dict[str, Any]:
"""Ensure we use a single price source for import."""
if (
val.get("entity_energy_price") is not None
and val.get("number_energy_price") is not None
):
raise vol.Invalid("Define either an entity or a fixed number for import price")
return val


def _grid_ensure_single_price_export(
val: dict[str, Any],
) -> dict[str, Any]:
"""Ensure we use a single price source for export."""
if (
val.get("entity_energy_price_export") is not None
and val.get("number_energy_price_export") is not None
):
raise vol.Invalid("Define either an entity or a fixed number for export price")
return val


def _grid_ensure_at_least_one_stat(
val: dict[str, Any],
) -> dict[str, Any]:
"""Ensure at least one of import, export, or power is configured."""
if (
val.get("stat_energy_from") is None
and val.get("stat_energy_to") is None
and val.get("stat_rate") is None
and val.get("power_config") is None
):
raise vol.Invalid(
"Grid must have at least one of: import meter, export meter, or power sensor"
)
return val


GRID_SOURCE_SCHEMA = vol.All(
vol.Schema(
{
vol.Required("type"): "grid",
# Import meter (can be None for export-only grids from legacy migration)
vol.Optional("stat_energy_from", default=None): vol.Any(str, None),
# Export meter (optional)
vol.Optional("stat_energy_to", default=None): vol.Any(str, None),
# Import cost tracking
vol.Optional("stat_cost", default=None): vol.Any(str, None),
vol.Optional("entity_energy_price", default=None): vol.Any(str, None),
vol.Optional("number_energy_price", default=None): vol.Any(
vol.Coerce(float), None
),
# Export compensation tracking
vol.Optional("stat_compensation", default=None): vol.Any(str, None),
vol.Optional("entity_energy_price_export", default=None): vol.Any(
str, None
),
vol.Optional("number_energy_price_export", default=None): vol.Any(
vol.Coerce(float), None
),
# Power measurement (optional)
vol.Optional("stat_rate"): str,
vol.Optional("power_config"): POWER_CONFIG_SCHEMA,
vol.Required("cost_adjustment_day"): vol.Coerce(float),
}
),
_grid_ensure_single_price_import,
_grid_ensure_single_price_export,
_grid_ensure_at_least_one_stat,
GRID_SOURCE_SCHEMA = vol.Schema(
{
vol.Required("type"): "grid",
vol.Required("flow_from"): vol.All(
[FLOW_FROM_GRID_SOURCE_SCHEMA],
_generate_unique_value_validator("stat_energy_from"),
),
vol.Required("flow_to"): vol.All(
[FLOW_TO_GRID_SOURCE_SCHEMA],
_generate_unique_value_validator("stat_energy_to"),
),
vol.Optional("power"): vol.All(
[GRID_POWER_SOURCE_SCHEMA],
_generate_unique_value_validator("stat_rate"),
),
vol.Required("cost_adjustment_day"): vol.Coerce(float),
}
)
SOLAR_SOURCE_SCHEMA = vol.Schema(
{

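vol.All runs the schema first and then each validator callable in order, so the cross-field checks above see already-coerced values. A minimal sketch of the same composition pattern, trimmed to two fields rather than the real GRID_SOURCE_SCHEMA:

import voluptuous as vol

def _single_price(val: dict) -> dict:
    """Reject configs that define both an entity price and a fixed price."""
    if (
        val.get("entity_energy_price") is not None
        and val.get("number_energy_price") is not None
    ):
        raise vol.Invalid("Define either an entity or a fixed number for import price")
    return val

SKETCH_SCHEMA = vol.All(
    vol.Schema(
        {
            vol.Optional("entity_energy_price", default=None): vol.Any(str, None),
            vol.Optional("number_energy_price", default=None): vol.Any(
                vol.Coerce(float), None
            ),
        }
    ),
    _single_price,
)

SKETCH_SCHEMA({"number_energy_price": "0.25"})  # coerced to 0.25, passes
try:
    SKETCH_SCHEMA({"entity_energy_price": "sensor.price", "number_energy_price": 0.25})
except vol.Invalid as err:
    print(err)  # both prices set -> rejected
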
@@ -460,46 +369,10 @@ WATER_SOURCE_SCHEMA = vol.Schema(

def check_type_limits(value: list[SourceType]) -> list[SourceType]:
"""Validate that we don't have too many of certain types."""
# Currently no type limits - multiple grid sources are allowed (like batteries)
return value
types = Counter([val["type"] for val in value])


def _validate_grid_stat_uniqueness(value: list[SourceType]) -> list[SourceType]:
"""Validate that grid statistics are unique across all sources."""
seen_import: set[str] = set()
seen_export: set[str] = set()
seen_rate: set[str] = set()

for source in value:
if source.get("type") != "grid":
continue

# Cast to GridSourceType since we've filtered for grid type
grid_source: GridSourceType = source  # type: ignore[assignment]

# Check import meter uniqueness
if (stat_from := grid_source.get("stat_energy_from")) is not None:
if stat_from in seen_import:
raise vol.Invalid(
f"Import meter {stat_from} is used in multiple grid connections"
)
seen_import.add(stat_from)

# Check export meter uniqueness
if (stat_to := grid_source.get("stat_energy_to")) is not None:
if stat_to in seen_export:
raise vol.Invalid(
f"Export meter {stat_to} is used in multiple grid connections"
)
seen_export.add(stat_to)

# Check power stat uniqueness
if (stat_rate := grid_source.get("stat_rate")) is not None:
if stat_rate in seen_rate:
raise vol.Invalid(
f"Power stat {stat_rate} is used in multiple grid connections"
)
seen_rate.add(stat_rate)
if types.get("grid", 0) > 1:
raise vol.Invalid("You cannot have more than 1 grid source")

return value

@@ -520,7 +393,6 @@ ENERGY_SOURCE_SCHEMA = vol.All(
]
),
check_type_limits,
_validate_grid_stat_uniqueness,
)

DEVICE_CONSUMPTION_SCHEMA = vol.Schema(
@@ -533,82 +405,6 @@ DEVICE_CONSUMPTION_SCHEMA = vol.Schema(
)


def _migrate_legacy_grid_to_unified(
old_grid: dict[str, Any],
) -> list[dict[str, Any]]:
"""Migrate legacy grid format (flow_from/flow_to/power arrays) to unified format.

Each grid connection can have any combination of import, export, and power -
all are optional as long as at least one is configured.

Migration pairs arrays by index position:
- flow_from[i], flow_to[i], and power[i] combine into grid connection i
- If arrays have different lengths, missing entries get None for that field
- The number of grid connections equals max(len(flow_from), len(flow_to), len(power))
"""
flow_from = old_grid.get("flow_from", [])
flow_to = old_grid.get("flow_to", [])
power_list = old_grid.get("power", [])
cost_adj = old_grid.get("cost_adjustment_day", 0.0)

new_sources: list[dict[str, Any]] = []
# Number of grid connections = max length across all three arrays
# If all arrays are empty, don't create any grid sources
max_len = max(len(flow_from), len(flow_to), len(power_list))
if max_len == 0:
return []

for i in range(max_len):
source: dict[str, Any] = {
"type": "grid",
"cost_adjustment_day": cost_adj,
}

# Import fields from flow_from
if i < len(flow_from):
ff = flow_from[i]
source["stat_energy_from"] = ff.get("stat_energy_from") or None
source["stat_cost"] = ff.get("stat_cost")
source["entity_energy_price"] = ff.get("entity_energy_price")
source["number_energy_price"] = ff.get("number_energy_price")
else:
# Export-only entry - set import to None (validation will flag this)
source["stat_energy_from"] = None
source["stat_cost"] = None
source["entity_energy_price"] = None
source["number_energy_price"] = None

# Export fields from flow_to
if i < len(flow_to):
ft = flow_to[i]
source["stat_energy_to"] = ft.get("stat_energy_to")
source["stat_compensation"] = ft.get("stat_compensation")
source["entity_energy_price_export"] = ft.get("entity_energy_price")
source["number_energy_price_export"] = ft.get("number_energy_price")
else:
source["stat_energy_to"] = None
source["stat_compensation"] = None
source["entity_energy_price_export"] = None
source["number_energy_price_export"] = None

# Power config at index i goes to grid connection at index i
if i < len(power_list):
power = power_list[i]
if "power_config" in power:
source["power_config"] = power["power_config"]
if "stat_rate" in power:
source["stat_rate"] = power["stat_rate"]

new_sources.append(source)

return new_sources

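The index-pairing rule in the docstring above is easiest to see on a tiny legacy payload. A hedged sketch of the expected result (statistic IDs are invented; _migrate_legacy_grid_to_unified is the function defined above):

legacy = {
    "type": "grid",
    "flow_from": [{"stat_energy_from": "sensor.grid_import"}],
    "flow_to": [
        {"stat_energy_to": "sensor.grid_export"},
        {"stat_energy_to": "sensor.second_export"},
    ],
    "power": [],
    "cost_adjustment_day": 0.0,
}

# max(len(flow_from), len(flow_to), len(power)) == 2, so two unified sources:
# index 0 pairs sensor.grid_import with sensor.grid_export;
# index 1 is export-only, so its import fields are filled with None.
migrated = _migrate_legacy_grid_to_unified(legacy)
assert [s["stat_energy_from"] for s in migrated] == ["sensor.grid_import", None]
assert [s["stat_energy_to"] for s in migrated] == [
    "sensor.grid_export",
    "sensor.second_export",
]
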
def _is_legacy_grid_format(source: dict[str, Any]) -> bool:
"""Check if a grid source is in the legacy format."""
return source.get("type") == "grid" and "flow_from" in source


class _EnergyPreferencesStore(storage.Store[EnergyPreferences]):
"""Energy preferences store with migration support."""

@@ -623,18 +419,6 @@ class _EnergyPreferencesStore(storage.Store[EnergyPreferences]):
if old_major_version == 1 and old_minor_version < 2:
# Add device_consumption_water field if it doesn't exist
data.setdefault("device_consumption_water", [])

if old_major_version == 1 and old_minor_version < 3:
# Migrate legacy grid format to unified format
new_sources: list[dict[str, Any]] = []
for source in data.get("energy_sources", []):
if _is_legacy_grid_format(source):
# Convert legacy grid to multiple unified grid sources
new_sources.extend(_migrate_legacy_grid_to_unified(source))
else:
new_sources.append(source)
data["energy_sources"] = new_sources

return data


@@ -732,18 +516,27 @@ class EnergyManager:
source: GridSourceType,
generate_entity_id: Callable[[str, PowerConfig], str],
) -> GridSourceType:
"""Set stat_rate for grid if power_config is specified."""
if "power_config" not in source:
"""Set stat_rate for grid power sources if power_config is specified."""
if "power" not in source:
return source

config = source["power_config"]
processed_power: list[GridPowerSourceType] = []
for power in source["power"]:
if "power_config" in power:
config = power["power_config"]

# If power_config has stat_rate (standard), just use it directly
if "stat_rate" in config:
return {**source, "stat_rate": config["stat_rate"]}
# If power_config has stat_rate (standard), just use it directly
if "stat_rate" in config:
processed_power.append({**power, "stat_rate": config["stat_rate"]})
else:
# For inverted or two-sensor config, set stat_rate to generated entity_id
processed_power.append(
{**power, "stat_rate": generate_entity_id("grid", config)}
)
else:
processed_power.append(power)

# For inverted or two-sensor config, set stat_rate to the generated entity_id
return {**source, "stat_rate": generate_entity_id("grid", config)}
return {**source, "power": processed_power}

@callback
def async_listen_updates(self, update_listener: Callable[[], Awaitable]) -> None:

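The stat_rate resolution above keeps an explicit rate statistic when power_config already carries one, and otherwise points stat_rate at a generated transform entity. A standalone sketch of that decision for a single connection (generate_entity_id is a dummy stand-in, and the two-sensor field names in the sample are invented for illustration):

def resolve_stat_rate(source: dict, generate_entity_id) -> dict:
    """Return a copy of source with stat_rate filled in from power_config."""
    if "power_config" not in source:
        return source
    config = source["power_config"]
    if "stat_rate" in config:  # standard single-sensor config
        return {**source, "stat_rate": config["stat_rate"]}
    # inverted or two-sensor config: point at the generated transform entity
    return {**source, "stat_rate": generate_entity_id("grid", config)}

dummy = lambda domain, cfg: f"sensor.{domain}_power_transformed"
single = resolve_stat_rate({"power_config": {"stat_rate": "sensor.meter"}}, dummy)
assert single["stat_rate"] == "sensor.meter"
two = resolve_stat_rate(
    {"power_config": {"stat_in": "sensor.in", "stat_out": "sensor.out"}}, dummy
)
assert two["stat_rate"] == "sensor.grid_power_transformed"
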
@@ -94,15 +94,22 @@ class SourceAdapter:


SOURCE_ADAPTERS: Final = (
# Grid import cost (unified format)
SourceAdapter(
"grid",
None,  # No flow_type - unified format
"flow_from",
"stat_energy_from",
"stat_cost",
"Cost",
"cost",
),
SourceAdapter(
"grid",
"flow_to",
"stat_energy_to",
"stat_compensation",
"Compensation",
"compensation",
),
SourceAdapter(
"gas",
None,
@@ -121,16 +128,6 @@ SOURCE_ADAPTERS: Final = (
),
)

# Separate adapter for grid export compensation (needs different price field)
GRID_EXPORT_ADAPTER: Final = SourceAdapter(
"grid",
None,  # No flow_type - unified format
"stat_energy_to",
"stat_compensation",
"Compensation",
"compensation",
)


class EntityNotFoundError(HomeAssistantError):
"""When a referenced entity was not found."""
@@ -186,20 +183,22 @@ class SensorManager:
if adapter.source_type != energy_source["type"]:
continue

self._process_sensor_data(
adapter,
energy_source,
to_add,
to_remove,
)
if adapter.flow_type is None:
self._process_sensor_data(
adapter,
energy_source,
to_add,
to_remove,
)
continue

# Handle grid export compensation (unified format uses different price fields)
if energy_source["type"] == "grid":
self._process_grid_export_sensor(
energy_source,
to_add,
to_remove,
)
for flow in energy_source[adapter.flow_type]:  # type: ignore[typeddict-item]
self._process_sensor_data(
adapter,
flow,
to_add,
to_remove,
)

# Process power sensors for battery and grid sources
self._process_power_sensor_data(
@@ -223,16 +222,11 @@ class SensorManager:
if config.get(adapter.total_money_key) is not None:
return

# Skip if the energy stat is not configured (e.g., export-only or power-only grids)
stat_energy = config.get(adapter.stat_energy_key)
if not stat_energy:
return

key = (adapter.source_type, adapter.flow_type, stat_energy)
key = (adapter.source_type, adapter.flow_type, config[adapter.stat_energy_key])

# Make sure the right data is there
# If the entity existed, we don't pop it from to_remove so it's removed
if not valid_entity_id(stat_energy) or (
if not valid_entity_id(config[adapter.stat_energy_key]) or (
config.get("entity_energy_price") is None
and config.get("number_energy_price") is None
):
@@ -248,56 +242,6 @@ class SensorManager:
)
to_add.append(self.current_entities[key])

@callback
def _process_grid_export_sensor(
self,
config: Mapping[str, Any],
to_add: list[EnergyCostSensor | EnergyPowerSensor],
to_remove: dict[tuple[str, str | None, str], EnergyCostSensor],
) -> None:
"""Process grid export compensation sensor (unified format).

The unified grid format uses different field names for export pricing:
- entity_energy_price_export instead of entity_energy_price
- number_energy_price_export instead of number_energy_price
"""
# No export meter configured
stat_energy_to = config.get("stat_energy_to")
if stat_energy_to is None:
return

# Already have a compensation stat
if config.get("stat_compensation") is not None:
return

key = ("grid", None, stat_energy_to)

# Check for export pricing fields (different names in unified format)
if not valid_entity_id(stat_energy_to) or (
config.get("entity_energy_price_export") is None
and config.get("number_energy_price_export") is None
):
return

# Create a config wrapper that maps the sell price fields to standard names
# so EnergyCostSensor can use them
export_config: dict[str, Any] = {
"stat_energy_to": stat_energy_to,
"stat_compensation": config.get("stat_compensation"),
"entity_energy_price": config.get("entity_energy_price_export"),
"number_energy_price": config.get("number_energy_price_export"),
}

if current_entity := to_remove.pop(key, None):
current_entity.update_config(export_config)
return

self.current_entities[key] = EnergyCostSensor(
GRID_EXPORT_ADAPTER,
export_config,
)
to_add.append(self.current_entities[key])

@callback
def _process_power_sensor_data(
self,
@@ -308,14 +252,21 @@ class SensorManager:
"""Process power sensor data for battery and grid sources."""
source_type = energy_source.get("type")

if source_type in ("battery", "grid"):
# Both battery and grid now use unified format with power_config at top level
if source_type == "battery":
power_config = energy_source.get("power_config")
if power_config and self._needs_power_sensor(power_config):
self._create_or_keep_power_sensor(
source_type, power_config, to_add, to_remove
)

elif source_type == "grid":
for power in energy_source.get("power", []):
power_config = power.get("power_config")
if power_config and self._needs_power_sensor(power_config):
self._create_or_keep_power_sensor(
source_type, power_config, to_add, to_remove
)

@staticmethod
def _needs_power_sensor(power_config: PowerConfig) -> bool:
"""Check if power_config needs a transform sensor."""
@@ -361,17 +312,6 @@ class EnergyCostSensor(SensorEntity):

This is intended as a fallback for when no specific cost sensor is available for the
utility.

Expected config fields (from adapter or export_config wrapper):
- stat_energy_key (via adapter): Key to get the energy statistic ID
- total_money_key (via adapter): Key to get the existing cost/compensation stat
- entity_energy_price: Entity ID providing price per unit (e.g., $/kWh)
- number_energy_price: Fixed price per unit

Note: For grid export compensation, the unified format uses different field names
(entity_energy_price_export, number_energy_price_export). The _process_grid_export_sensor
method in SensorManager creates a wrapper config that maps these to the standard
field names (entity_energy_price, number_energy_price) so this class can use them.
"""

_attr_entity_registry_visible_default = False

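The wrapper described in this docstring reduces to a small field-renaming dict. A sketch with invented statistic IDs:

def export_wrapper(config: dict) -> dict:
    """Map unified-format export price fields onto the standard names."""
    return {
        "stat_energy_to": config.get("stat_energy_to"),
        "stat_compensation": config.get("stat_compensation"),
        "entity_energy_price": config.get("entity_energy_price_export"),
        "number_energy_price": config.get("number_energy_price_export"),
    }

unified = {"stat_energy_to": "sensor.grid_export", "number_energy_price_export": 0.08}
wrapped = export_wrapper(unified)
assert wrapped["number_energy_price"] == 0.08  # standard name, usable downstream
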
@@ -401,20 +401,16 @@ def _validate_grid_source(
source_result: ValidationIssues,
validate_calls: list[functools.partial[None]],
) -> None:
"""Validate grid energy source (unified format)."""
stat_energy_from = source.get("stat_energy_from")
stat_energy_to = source.get("stat_energy_to")
stat_rate = source.get("stat_rate")

# Validate import meter (optional)
if stat_energy_from:
wanted_statistics_metadata.add(stat_energy_from)
"""Validate grid energy source."""
flow_from: data.FlowFromGridSourceType
for flow_from in source["flow_from"]:
wanted_statistics_metadata.add(flow_from["stat_energy_from"])
validate_calls.append(
functools.partial(
_async_validate_usage_stat,
hass,
statistics_metadata,
stat_energy_from,
flow_from["stat_energy_from"],
ENERGY_USAGE_DEVICE_CLASSES,
ENERGY_USAGE_UNITS,
ENERGY_UNIT_ERROR,
@@ -422,8 +418,7 @@ def _validate_grid_source(
)
)

# Validate import cost tracking (only if import meter exists)
if (stat_cost := source.get("stat_cost")) is not None:
if (stat_cost := flow_from.get("stat_cost")) is not None:
wanted_statistics_metadata.add(stat_cost)
validate_calls.append(
functools.partial(
@@ -434,7 +429,7 @@ def _validate_grid_source(
source_result,
)
)
elif (entity_energy_price := source.get("entity_energy_price")) is not None:
elif (entity_energy_price := flow_from.get("entity_energy_price")) is not None:
validate_calls.append(
functools.partial(
_async_validate_price_entity,
@@ -447,27 +442,27 @@ def _validate_grid_source(
)

if (
source.get("entity_energy_price") is not None
or source.get("number_energy_price") is not None
flow_from.get("entity_energy_price") is not None
or flow_from.get("number_energy_price") is not None
):
validate_calls.append(
functools.partial(
_async_validate_auto_generated_cost_entity,
hass,
stat_energy_from,
flow_from["stat_energy_from"],
source_result,
)
)

# Validate export meter (optional)
if stat_energy_to:
wanted_statistics_metadata.add(stat_energy_to)
flow_to: data.FlowToGridSourceType
for flow_to in source["flow_to"]:
wanted_statistics_metadata.add(flow_to["stat_energy_to"])
validate_calls.append(
functools.partial(
_async_validate_usage_stat,
hass,
statistics_metadata,
stat_energy_to,
flow_to["stat_energy_to"],
ENERGY_USAGE_DEVICE_CLASSES,
ENERGY_USAGE_UNITS,
ENERGY_UNIT_ERROR,
@@ -475,8 +470,7 @@ def _validate_grid_source(
)
)

# Validate export compensation tracking
if (stat_compensation := source.get("stat_compensation")) is not None:
if (stat_compensation := flow_to.get("stat_compensation")) is not None:
wanted_statistics_metadata.add(stat_compensation)
validate_calls.append(
functools.partial(
@@ -487,14 +481,12 @@ def _validate_grid_source(
source_result,
)
)
elif (
entity_price_export := source.get("entity_energy_price_export")
) is not None:
elif (entity_energy_price := flow_to.get("entity_energy_price")) is not None:
validate_calls.append(
functools.partial(
_async_validate_price_entity,
hass,
entity_price_export,
entity_energy_price,
source_result,
ENERGY_PRICE_UNITS,
ENERGY_PRICE_UNIT_ERROR,
@@ -502,27 +494,26 @@ def _validate_grid_source(
)

if (
source.get("entity_energy_price_export") is not None
or source.get("number_energy_price_export") is not None
flow_to.get("entity_energy_price") is not None
or flow_to.get("number_energy_price") is not None
):
validate_calls.append(
functools.partial(
_async_validate_auto_generated_cost_entity,
hass,
stat_energy_to,
flow_to["stat_energy_to"],
source_result,
)
)

# Validate power sensor (optional)
if stat_rate:
wanted_statistics_metadata.add(stat_rate)
for power_stat in source.get("power", []):
wanted_statistics_metadata.add(power_stat["stat_rate"])
validate_calls.append(
functools.partial(
_async_validate_power_stat,
hass,
statistics_metadata,
stat_rate,
power_stat["stat_rate"],
POWER_USAGE_DEVICE_CLASSES,
POWER_USAGE_UNITS,
POWER_UNIT_ERROR,

@@ -77,7 +77,7 @@ class FacebookNotificationService(BaseNotificationService):
"recipient": recipient,
"message": body_message,
"messaging_type": "MESSAGE_TAG",
"tag": "HUMAN_AGENT",
"tag": "ACCOUNT_UPDATE",
}
resp = requests.post(
BASE_URL,

@@ -8,14 +8,15 @@ from aiohttp.client_exceptions import ClientConnectorError
from gios import Gios
from gios.exceptions import GiosError

from homeassistant.components.air_quality import DOMAIN as AIR_QUALITY_PLATFORM
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import CONF_STATION_ID, DOMAIN
from .coordinator import GiosConfigEntry, GiosDataUpdateCoordinator
from .coordinator import GiosConfigEntry, GiosData, GiosDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

@@ -55,10 +56,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: GiosConfigEntry) -> bool
coordinator = GiosDataUpdateCoordinator(hass, entry, gios)
await coordinator.async_config_entry_first_refresh()

entry.runtime_data = coordinator
entry.runtime_data = GiosData(coordinator)

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

# Remove air_quality entities from registry if they exist
ent_reg = er.async_get(hass)
unique_id = str(coordinator.gios.station_id)
if entity_id := ent_reg.async_get_entity_id(
AIR_QUALITY_PLATFORM, DOMAIN, unique_id
):
_LOGGER.debug("Removing deprecated air_quality entity %s", entity_id)
ent_reg.async_remove(entity_id)

return True

@@ -38,18 +38,14 @@ class GiosFlowHandler(ConfigFlow, domain=DOMAIN):
if user_input is not None:
station_id = user_input[CONF_STATION_ID]

await self.async_set_unique_id(station_id, raise_on_progress=False)
self._abort_if_unique_id_configured()

try:
await self.async_set_unique_id(station_id, raise_on_progress=False)
self._abort_if_unique_id_configured()

async with asyncio.timeout(API_TIMEOUT):
gios = await Gios.create(websession, int(station_id))
await gios.async_update()
except (ApiError, ClientConnectorError, TimeoutError):
errors["base"] = "cannot_connect"
except InvalidSensorsDataError:
errors[CONF_STATION_ID] = "invalid_sensors_data"
else:

# GIOS treats station ID as int
user_input[CONF_STATION_ID] = int(station_id)

@@ -64,6 +60,10 @@ class GiosFlowHandler(ConfigFlow, domain=DOMAIN):
# raising errors.
data={**user_input, CONF_NAME: gios.station_name},
)
except (ApiError, ClientConnectorError, TimeoutError):
errors["base"] = "cannot_connect"
except InvalidSensorsDataError:
errors[CONF_STATION_ID] = "invalid_sensors_data"

try:
gios = await Gios.create(websession)

@@ -3,6 +3,7 @@

from __future__ import annotations

import asyncio
from dataclasses import dataclass
import logging
from typing import TYPE_CHECKING

@@ -21,7 +22,14 @@ from .const import API_TIMEOUT, DOMAIN, MANUFACTURER, SCAN_INTERVAL, URL

_LOGGER = logging.getLogger(__name__)

type GiosConfigEntry = ConfigEntry[GiosDataUpdateCoordinator]
type GiosConfigEntry = ConfigEntry[GiosData]


@dataclass
class GiosData:
"""Data for GIOS integration."""

coordinator: GiosDataUpdateCoordinator


class GiosDataUpdateCoordinator(DataUpdateCoordinator[GiosSensors]):

@@ -14,7 +14,7 @@ async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: GiosConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator = config_entry.runtime_data
coordinator = config_entry.runtime_data.coordinator

return {
"config_entry": config_entry.as_dict(),

@@ -7,6 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["dacite", "gios"],
"quality_scale": "platinum",
"requirements": ["gios==7.0.0"]
}

@@ -1,4 +1,7 @@
rules:
# Other comments:
# - we could consider removing the air quality entity removal

# Bronze
action-setup:
status: exempt
@@ -6,8 +9,14 @@ rules:
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
config-flow-test-coverage:
status: todo
comment:
We should have the happy flow as the first test, which can be merged with test_show_form.
The config flow tests are missing adding a duplicate entry test.
config-flow:
status: todo
comment: Limit the scope of the try block in the user step
dependency-transparency: done
docs-actions:
status: exempt
@@ -18,7 +27,9 @@ rules:
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
runtime-data:
status: todo
comment: No direct need to wrap the coordinator in a dataclass to store in the config entry
test-before-configure: done
test-before-setup: done
unique-config-entry: done
@@ -39,7 +50,11 @@ rules:
reauthentication-flow:
status: exempt
comment: This integration does not require authentication.
test-coverage: done
test-coverage:
status: todo
comment:
The `test_async_setup_entry` should test the state of the mock config entry, instead of an entity state
The `test_availability` doesn't really do what it says it does, and this is now already tested via the snapshot tests.

# Gold
devices: done
@@ -63,9 +78,13 @@ rules:
status: exempt
comment: This integration does not have devices.
entity-category: done
entity-device-class: done
entity-device-class:
status: todo
comment: We can use the CO device class for the carbon monoxide sensor
entity-disabled-by-default: done
entity-translations: done
entity-translations:
status: todo
comment: We can remove the options state_attributes.
exception-translations: done
icon-translations: done
reconfiguration-flow:

@@ -72,9 +72,9 @@ SENSOR_TYPES: tuple[GiosSensorEntityDescription, ...] = (
key=ATTR_CO,
value=lambda sensors: sensors.co.value if sensors.co else None,
suggested_display_precision=0,
device_class=SensorDeviceClass.CO,
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
translation_key="co",
),
GiosSensorEntityDescription(
key=ATTR_NO,
@@ -181,7 +181,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Add GIOS entities from a config_entry."""
coordinator = entry.runtime_data
coordinator = entry.runtime_data.coordinator
# Due to the change of the attribute name of one sensor, it is necessary to migrate
# the unique_id to the new name.
entity_registry = er.async_get(hass)

@@ -31,11 +31,26 @@
          "sufficient": "Sufficient",
          "very_bad": "Very bad",
          "very_good": "Very good"
        },
        "state_attributes": {
          "options": {
            "state": {
              "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
              "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
              "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
              "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
              "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
              "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
            }
          }
        }
      },
      "c6h6": {
        "name": "Benzene"
      },
      "co": {
        "name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]"
      },
      "no2_index": {
        "name": "Nitrogen dioxide index",
        "state": {
@@ -45,6 +60,18 @@
          "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
          "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
          "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
        },
        "state_attributes": {
          "options": {
            "state": {
              "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
              "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
              "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
              "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
              "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
              "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
            }
          }
        }
      },
      "nox": {
@@ -59,6 +86,18 @@
          "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
          "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
          "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
        },
        "state_attributes": {
          "options": {
            "state": {
              "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
              "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
              "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
              "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
              "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
              "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
            }
          }
        }
      },
      "pm10_index": {
@@ -70,6 +109,18 @@
          "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
          "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
          "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
        },
        "state_attributes": {
          "options": {
            "state": {
              "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
              "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
              "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
              "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
              "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
              "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
            }
          }
        }
      },
      "pm25_index": {
@@ -81,6 +132,18 @@
          "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
          "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
          "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
        },
        "state_attributes": {
          "options": {
            "state": {
              "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
              "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
              "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
              "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
              "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
              "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
            }
          }
        }
      },
      "so2_index": {
@@ -92,6 +155,18 @@
          "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
          "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
          "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
        },
        "state_attributes": {
          "options": {
            "state": {
              "bad": "[%key:component::gios::entity::sensor::aqi::state::bad%]",
              "good": "[%key:component::gios::entity::sensor::aqi::state::good%]",
              "moderate": "[%key:component::gios::entity::sensor::aqi::state::moderate%]",
              "sufficient": "[%key:component::gios::entity::sensor::aqi::state::sufficient%]",
              "very_bad": "[%key:component::gios::entity::sensor::aqi::state::very_bad%]",
              "very_good": "[%key:component::gios::entity::sensor::aqi::state::very_good%]"
            }
          }
        }
      }
    }

@@ -8,5 +8,5 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["googleapiclient"],
  "requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==13.2.0"]
  "requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==12.1.3"]
}

@@ -51,86 +51,31 @@ class GoogleGenerativeAITextToSpeechEntity(
    # Note the documentation might not be up to date, e.g. el-GR is not listed
    # there but is supported.
    _attr_supported_languages = [
        "af-ZA",
        "am-ET",
        "ar-EG",
        "az-AZ",
        "be-BY",
        "bg-BG",
        "bn-BD",
        "ca-ES",
        "ceb-PH",
        "cmn-CN",
        "cs-CZ",
        "da-DK",
        "de-DE",
        "el-GR",
        "en-IN",
        "en-US",
        "es-ES",
        "es-US",
        "et-EE",
        "eu-ES",
        "fa-IR",
        "fi-FI",
        "fil-PH",
        "fr-FR",
        "gl-ES",
        "gu-IN",
        "he-IL",
        "hi-IN",
        "hr-HR",
        "ht-HT",
        "hu-HU",
        "hy-AM",
        "id-ID",
        "is-IS",
        "it-IT",
        "ja-JP",
        "jv-ID",
        "ka-GE",
        "kn-IN",
        "ko-KR",
        "kok-IN",
        "la-VA",
        "lb-LU",
        "lo-LA",
        "lt-LT",
        "lv-LV",
        "mai-IN",
        "mg-MG",
        "mk-MK",
        "ml-IN",
        "mn-MN",
        "mr-IN",
        "ms-MY",
        "my-MM",
        "nb-NO",
        "ne-NP",
        "nl-NL",
        "nn-NO",
        "or-IN",
        "pa-IN",
        "pl-PL",
        "ps-AF",
        "pt-BR",
        "pt-PT",
        "ro-RO",
        "ru-RU",
        "sd-PK",
        "si-LK",
        "sk-SK",
        "sl-SI",
        "sq-AL",
        "sr-RS",
        "sv-SE",
        "sw-KE",
        "ta-IN",
        "te-IN",
        "th-TH",
        "tr-TR",
        "uk-UA",
        "ur-PK",
        "vi-VN",
    ]
    # Unused, but required by base class.

@@ -68,25 +68,15 @@ MIN_NUMBER_TYPES: tuple[GrowattNumberEntityDescription, ...] = (
        native_unit_of_measurement=PERCENTAGE,
    ),
    GrowattNumberEntityDescription(
        key="battery_discharge_soc_limit",  # Keep original key to preserve unique_id
        translation_key="battery_discharge_soc_limit_off_grid",
        api_key="wdisChargeSOCLowLimit",  # Key returned by V1 API (off-grid)
        key="battery_discharge_soc_limit",
        translation_key="battery_discharge_soc_limit",
        api_key="wdisChargeSOCLowLimit",  # Key returned by V1 API
        write_key="discharge_stop_soc",  # Key used to write parameter
        native_step=1,
        native_min_value=0,
        native_max_value=100,
        native_unit_of_measurement=PERCENTAGE,
    ),
    GrowattNumberEntityDescription(
        key="battery_discharge_soc_limit_on_grid",
        translation_key="battery_discharge_soc_limit_on_grid",
        api_key="onGridDischargeStopSOC",  # Key returned by V1 API (on-grid)
        write_key="on_grid_discharge_stop_soc",  # Key used to write parameter
        native_step=1,
        native_min_value=0,
        native_max_value=100,
        native_unit_of_measurement=PERCENTAGE,
    ),
)

@@ -53,11 +53,8 @@
      "battery_discharge_power_limit": {
        "name": "Battery discharge power limit"
      },
      "battery_discharge_soc_limit_off_grid": {
        "name": "Battery discharge SOC limit (off-grid)"
      },
      "battery_discharge_soc_limit_on_grid": {
        "name": "Battery discharge SOC limit (on-grid)"
      "battery_discharge_soc_limit": {
        "name": "Battery discharge SOC limit"
      }
    },
    "sensor": {

@@ -164,10 +164,10 @@
        "name": "Relay"
      },
      "tx0plus5": {
        "name": "TX0 force +5V"
        "name": "TX0 force +5v"
      },
      "tx1plus5": {
        "name": "TX1 force +5V"
        "name": "TX1 force +5v"
      }
    }
  },

@@ -50,44 +50,6 @@ class IntegrationNotFoundFlow(RepairsFlow):
        )


class OrphanedConfigEntryFlow(RepairsFlow):
    """Handler for an issue fixing flow."""

    def __init__(self, data: dict[str, str]) -> None:
        """Initialize."""
        self.entry_id = data["entry_id"]
        self.description_placeholders = data

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> FlowResult:
        """Handle the first step of a fix flow."""
        return self.async_show_menu(
            step_id="init",
            menu_options=["confirm", "ignore"],
            description_placeholders=self.description_placeholders,
        )

    async def async_step_confirm(
        self, user_input: dict[str, str] | None = None
    ) -> FlowResult:
        """Handle the confirm step of a fix flow."""
        await self.hass.config_entries.async_remove(self.entry_id)
        return self.async_create_entry(data={})

    async def async_step_ignore(
        self, user_input: dict[str, str] | None = None
    ) -> FlowResult:
        """Handle the ignore step of a fix flow."""
        ir.async_get(self.hass).async_ignore(
            DOMAIN, f"orphaned_ignored_entry.{self.entry_id}", True
        )
        return self.async_abort(
            reason="issue_ignored",
            description_placeholders=self.description_placeholders,
        )


async def async_create_fix_flow(
    hass: HomeAssistant, issue_id: str, data: dict[str, str] | None
) -> RepairsFlow:
@@ -96,7 +58,4 @@ async def async_create_fix_flow(
    if issue_id.split(".", maxsplit=1)[0] == "integration_not_found":
        assert data
        return IntegrationNotFoundFlow(data)
    if issue_id.split(".", maxsplit=1)[0] == "orphaned_ignored_entry":
        assert data
        return OrphanedConfigEntryFlow(data)
    return ConfirmRepairFlow()

@@ -162,24 +162,6 @@
      "description": "It's not possible to configure {platform} {domain} by adding `{platform_key}` to the {domain} configuration. Please check the documentation for more information on how to set up this integration.\n\nTo resolve this:\n1. Remove `{platform_key}` occurrences from the `{domain}:` configuration in your YAML configuration file.\n2. Restart Home Assistant.\n\nExample that should be removed:\n{yaml_example}",
      "title": "Unused YAML configuration for the {platform} integration"
    },
    "orphaned_ignored_config_entry": {
      "fix_flow": {
        "abort": {
          "issue_ignored": "Non-existent integration {domain} ignored."
        },
        "step": {
          "init": {
            "description": "There is an ignored orphaned config entry for the `{domain}` integration. This can happen when an integration is removed, but the config entry is still present in Home Assistant.\n\nTo resolve this, press **Remove** to clean up the orphaned entry.",
            "menu_options": {
              "confirm": "Remove",
              "ignore": "Ignore"
            },
            "title": "[%key:component::homeassistant::issues::orphaned_ignored_config_entry::title%]"
          }
        }
      },
      "title": "Orphaned ignored config entry for {domain}"
    },
    "platform_only": {
      "description": "The {domain} integration does not support configuration under its own key, it must be configured under its supported platforms.\n\nTo resolve this:\n\n1. Remove `{domain}:` from your YAML configuration file.\n\n2. Restart Home Assistant.",
      "title": "The {domain} integration does not support YAML configuration under its own key"

@@ -31,7 +31,6 @@ HOMEE_UNIT_TO_HA_UNIT = {
    "n/a": None,
    "text": None,
    "%": PERCENTAGE,
    "Lux": LIGHT_LUX,
    "lx": LIGHT_LUX,
    "klx": LIGHT_LUX,
    "1/min": REVOLUTIONS_PER_MINUTE,

@@ -30,7 +30,6 @@ from homematicip.device import (
    PresenceDetectorIndoor,
    RoomControlDeviceAnalog,
    SmokeDetector,
    SoilMoistureSensorInterface,
    SwitchMeasuring,
    TemperatureDifferenceSensor2,
    TemperatureHumiditySensorDisplay,
@@ -286,10 +285,6 @@ def get_device_handlers(hap: HomematicipHAP) -> dict[type, Callable]:
        EnergySensorsInterface: lambda device: _handle_energy_sensor_interface(
            hap, device
        ),
        SoilMoistureSensorInterface: lambda device: [
            HomematicipSoilMoistureSensor(hap, device),
            HomematicipSoilTemperatureSensor(hap, device),
        ],
    }


@@ -627,7 +622,6 @@ class HomematicipAbsoluteHumiditySensor(HomematicipGenericEntity, SensorEntity):

    _attr_device_class = SensorDeviceClass.ABSOLUTE_HUMIDITY
    _attr_native_unit_of_measurement = CONCENTRATION_GRAMS_PER_CUBIC_METER
    _attr_suggested_display_precision = 1
    _attr_suggested_unit_of_measurement = CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER
    _attr_state_class = SensorStateClass.MEASUREMENT

@@ -642,7 +636,7 @@ class HomematicipAbsoluteHumiditySensor(HomematicipGenericEntity, SensorEntity):
        if value is None or value == "":
            return None

        return value
        return round(value, 3)


class HomematicipIlluminanceSensor(HomematicipGenericEntity, SensorEntity):
@@ -1037,48 +1031,6 @@ class HmipSmokeDetectorSensor(HomematicipGenericEntity, SensorEntity):
        return self.entity_description.value_fn(self._device)


class HomematicipSoilMoistureSensor(HomematicipGenericEntity, SensorEntity):
    """Representation of the HomematicIP soil moisture sensor."""

    _attr_device_class = SensorDeviceClass.MOISTURE
    _attr_native_unit_of_measurement = PERCENTAGE
    _attr_state_class = SensorStateClass.MEASUREMENT

    def __init__(self, hap: HomematicipHAP, device) -> None:
        """Initialize the soil moisture sensor device."""
        super().__init__(
            hap, device, post="Soil Moisture", channel=1, is_multi_channel=True
        )

    @property
    def native_value(self) -> int | None:
        """Return the state."""
        if self.functional_channel is None:
            return None
        return self.functional_channel.soilMoisture


class HomematicipSoilTemperatureSensor(HomematicipGenericEntity, SensorEntity):
    """Representation of the HomematicIP soil temperature sensor."""

    _attr_device_class = SensorDeviceClass.TEMPERATURE
    _attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS
    _attr_state_class = SensorStateClass.MEASUREMENT

    def __init__(self, hap: HomematicipHAP, device) -> None:
        """Initialize the soil temperature sensor device."""
        super().__init__(
            hap, device, post="Soil Temperature", channel=1, is_multi_channel=True
        )

    @property
    def native_value(self) -> float | None:
        """Return the state."""
        if self.functional_channel is None:
            return None
        return self.functional_channel.soilTemperature


def _get_wind_direction(wind_direction_degree: float) -> str:
    """Convert wind direction degree to named direction."""
    if 11.25 <= wind_direction_degree < 33.75:

@@ -6,7 +6,7 @@
  "documentation": "https://www.home-assistant.io/integrations/homevolt",
  "integration_type": "device",
  "iot_class": "local_polling",
  "quality_scale": "silver",
  "quality_scale": "bronze",
  "requirements": ["homevolt==0.4.4"],
  "zeroconf": [
    {

@@ -33,13 +33,13 @@ rules:
  docs-configuration-parameters:
    status: exempt
    comment: Integration does not have an options flow.
  docs-installation-parameters: done
  docs-installation-parameters: todo
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  log-when-unavailable: todo
  parallel-updates: done
  reauthentication-flow: done
  test-coverage: done
  test-coverage: todo

  # Gold
  devices: done

@@ -3,18 +3,14 @@
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, discovery
from homeassistant.helpers import discovery

from .const import DOMAIN

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up HTML5 from a config entry."""
    hass.async_create_task(
        discovery.async_load_platform(
            hass, Platform.NOTIFY, DOMAIN, dict(entry.data), {}
        )
    await discovery.async_load_platform(
        hass, Platform.NOTIFY, DOMAIN, dict(entry.data), {}
    )
    return True

@@ -17,6 +17,7 @@ from homeassistant.const import CONF_NAME
from homeassistant.core import callback

from .const import ATTR_VAPID_EMAIL, ATTR_VAPID_PRV_KEY, ATTR_VAPID_PUB_KEY, DOMAIN
from .issues import async_create_html5_issue


def vapid_generate_private_key() -> str:
@@ -91,3 +92,14 @@ class HTML5ConfigFlow(ConfigFlow, domain=DOMAIN):
            ),
            errors=errors,
        )

    async def async_step_import(
        self: HTML5ConfigFlow, import_config: dict
    ) -> ConfigFlowResult:
        """Handle config import from yaml."""
        _, flow_result = self._async_create_html5_entry(import_config)
        if not flow_result:
            async_create_html5_issue(self.hass, False)
            return self.async_abort(reason="invalid_config")
        async_create_html5_issue(self.hass, True)
        return flow_result

homeassistant/components/html5/issues.py (new file, 50 lines)
@@ -0,0 +1,50 @@
"""Issues utility for HTML5."""

import logging

from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

SUCCESSFUL_IMPORT_TRANSLATION_KEY = "deprecated_yaml"
FAILED_IMPORT_TRANSLATION_KEY = "deprecated_yaml_import_issue"

INTEGRATION_TITLE = "HTML5 Push Notifications"


@callback
def async_create_html5_issue(hass: HomeAssistant, import_success: bool) -> None:
    """Create issues for HTML5."""
    if import_success:
        async_create_issue(
            hass,
            HOMEASSISTANT_DOMAIN,
            f"deprecated_yaml_{DOMAIN}",
            breaks_in_ha_version="2025.4.0",
            is_fixable=False,
            issue_domain=DOMAIN,
            severity=IssueSeverity.WARNING,
            translation_key="deprecated_yaml",
            translation_placeholders={
                "domain": DOMAIN,
                "integration_title": INTEGRATION_TITLE,
            },
        )
    else:
        async_create_issue(
            hass,
            DOMAIN,
            f"deprecated_yaml_{DOMAIN}",
            breaks_in_ha_version="2025.4.0",
            is_fixable=False,
            issue_domain=DOMAIN,
            severity=IssueSeverity.WARNING,
            translation_key="deprecated_yaml_import_issue",
            translation_placeholders={
                "domain": DOMAIN,
                "integration_title": INTEGRATION_TITLE,
            },
        )
@@ -7,6 +7,6 @@
  "documentation": "https://www.home-assistant.io/integrations/html5",
  "iot_class": "cloud_push",
  "loggers": ["http_ece", "py_vapid", "pywebpush"],
  "requirements": ["pywebpush==2.3.0", "py_vapid==1.9.4"],
  "requirements": ["pywebpush==1.14.1"],
  "single_config_entry": true
}

@@ -4,15 +4,15 @@ from __future__ import annotations

from contextlib import suppress
from datetime import datetime, timedelta
from functools import partial
from http import HTTPStatus
import json
import logging
import time
from typing import TYPE_CHECKING, Any, NotRequired, TypedDict, cast
from typing import Any
from urllib.parse import urlparse
import uuid

from aiohttp import ClientSession, web
from aiohttp.hdrs import AUTHORIZATION
import jwt
from py_vapid import Vapid
@@ -27,18 +27,18 @@ from homeassistant.components.notify import (
    ATTR_TARGET,
    ATTR_TITLE,
    ATTR_TITLE_DEFAULT,
    PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA,
    BaseNotificationService,
)
from homeassistant.components.websocket_api import ActiveConnection
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import ATTR_NAME, URL_ROOT
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.json import save_json
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import ensure_unique_string
from homeassistant.util.json import load_json_object
from homeassistant.util.json import JsonObjectType, load_json_object

from .const import (
    ATTR_VAPID_EMAIL,
@@ -47,12 +47,23 @@ from .const import (
    DOMAIN,
    SERVICE_DISMISS,
)
from .issues import async_create_html5_issue

_LOGGER = logging.getLogger(__name__)

REGISTRATIONS_FILE = "html5_push_registrations.conf"


PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend(
    {
        vol.Optional("gcm_sender_id"): cv.string,
        vol.Optional("gcm_api_key"): cv.string,
        vol.Required(ATTR_VAPID_PUB_KEY): cv.string,
        vol.Required(ATTR_VAPID_PRV_KEY): cv.string,
        vol.Required(ATTR_VAPID_EMAIL): cv.string,
    }
)

ATTR_SUBSCRIPTION = "subscription"
ATTR_BROWSER = "browser"

@@ -148,29 +159,6 @@ HTML5_SHOWNOTIFICATION_PARAMETERS = (
)


class Keys(TypedDict):
    """Types for keys."""

    p256dh: str
    auth: str


class Subscription(TypedDict):
    """Types for subscription."""

    endpoint: str
    expirationTime: int | None
    keys: Keys


class Registration(TypedDict):
    """Types for registration."""

    subscription: Subscription
    browser: str
    name: NotRequired[str]


async def async_get_service(
    hass: HomeAssistant,
    config: ConfigType,
@@ -178,7 +166,17 @@ async def async_get_service(
) -> HTML5NotificationService | None:
    """Get the HTML5 push notification service."""
    if config:
        existing_config_entry = hass.config_entries.async_entries(DOMAIN)
        if existing_config_entry:
            async_create_html5_issue(hass, True)
            return None
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN, context={"source": SOURCE_IMPORT}, data=config
            )
        )
        return None

    if discovery_info is None:
        return None

@@ -186,14 +184,11 @@ async def async_get_service(

    registrations = await hass.async_add_executor_job(_load_config, json_path)

    vapid_pub_key: str = discovery_info[ATTR_VAPID_PUB_KEY]
    vapid_prv_key: str = discovery_info[ATTR_VAPID_PRV_KEY]
    vapid_email: str = discovery_info[ATTR_VAPID_EMAIL]
    vapid_pub_key = discovery_info[ATTR_VAPID_PUB_KEY]
    vapid_prv_key = discovery_info[ATTR_VAPID_PRV_KEY]
    vapid_email = discovery_info[ATTR_VAPID_EMAIL]

    @callback
    def websocket_appkey(
        _hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
    ) -> None:
    def websocket_appkey(_hass, connection, msg):
        connection.send_message(websocket_api.result_message(msg["id"], vapid_pub_key))

    websocket_api.async_register_command(
@@ -203,16 +198,15 @@ async def async_get_service(
    hass.http.register_view(HTML5PushRegistrationView(registrations, json_path))
    hass.http.register_view(HTML5PushCallbackView(registrations))

    session = async_get_clientsession(hass)
    return HTML5NotificationService(
        hass, session, vapid_prv_key, vapid_email, registrations, json_path
        hass, vapid_prv_key, vapid_email, registrations, json_path
    )


def _load_config(filename: str) -> dict[str, Registration]:
def _load_config(filename: str) -> JsonObjectType:
    """Load configuration."""
    with suppress(HomeAssistantError):
        return cast(dict[str, Registration], load_json_object(filename))
        return load_json_object(filename)
    return {}


@@ -222,20 +216,19 @@ class HTML5PushRegistrationView(HomeAssistantView):
    url = "/api/notify.html5"
    name = "api:notify.html5"

    def __init__(self, registrations: dict[str, Registration], json_path: str) -> None:
    def __init__(self, registrations, json_path):
        """Init HTML5PushRegistrationView."""
        self.registrations = registrations
        self.json_path = json_path

    async def post(self, request: web.Request) -> web.Response:
    async def post(self, request):
        """Accept the POST request for push registrations from a browser."""

        try:
            data: Registration = await request.json()
            data = await request.json()
        except ValueError:
            return self.json_message("Invalid JSON", HTTPStatus.BAD_REQUEST)
        try:
            data = cast(Registration, REGISTER_SCHEMA(data))
            data = REGISTER_SCHEMA(data)
        except vol.Invalid as ex:
            return self.json_message(humanize_error(data, ex), HTTPStatus.BAD_REQUEST)

@@ -264,32 +257,28 @@ class HTML5PushRegistrationView(HomeAssistantView):
                "Error saving registration.", HTTPStatus.INTERNAL_SERVER_ERROR
            )

    def find_registration_name(
        self,
        data: Registration,
        suggested: str | None = None,
    ):
    def find_registration_name(self, data, suggested=None):
        """Find a registration name matching data or generate a unique one."""
        endpoint = data["subscription"]["endpoint"]
        endpoint = data.get(ATTR_SUBSCRIPTION).get(ATTR_ENDPOINT)
        for key, registration in self.registrations.items():
            subscription = registration["subscription"]
            subscription = registration.get(ATTR_SUBSCRIPTION)
            if subscription.get(ATTR_ENDPOINT) == endpoint:
                return key
        return ensure_unique_string(suggested or "unnamed device", self.registrations)

    async def delete(self, request: web.Request):
    async def delete(self, request):
        """Delete a registration."""
        try:
            data: dict[str, Any] = await request.json()
            data = await request.json()
        except ValueError:
            return self.json_message("Invalid JSON", HTTPStatus.BAD_REQUEST)

        subscription: dict[str, Any] = data[ATTR_SUBSCRIPTION]
        subscription = data.get(ATTR_SUBSCRIPTION)

        found = None

        for key, registration in self.registrations.items():
            if registration["subscription"] == subscription:
            if registration.get(ATTR_SUBSCRIPTION) == subscription:
                found = key
                break

@@ -321,11 +310,11 @@ class HTML5PushCallbackView(HomeAssistantView):
    url = "/api/notify.html5/callback"
    name = "api:notify.html5/callback"

    def __init__(self, registrations: dict[str, Registration]) -> None:
    def __init__(self, registrations):
        """Init HTML5PushCallbackView."""
        self.registrations = registrations

    def decode_jwt(self, token: str) -> web.Response | dict[str, Any]:
    def decode_jwt(self, token):
        """Find the registration that signed this JWT and return it."""

        # 1. Check claims w/o verifying to see if a target is in there.
@@ -333,12 +322,12 @@ class HTML5PushCallbackView(HomeAssistantView):
        # 2a. If decode is successful, return the payload.
        # 2b. If decode is unsuccessful, return a 401.

        target_check: dict[str, Any] = jwt.decode(
        target_check = jwt.decode(
            token, algorithms=["ES256", "HS256"], options={"verify_signature": False}
        )
        if target_check.get(ATTR_TARGET) in self.registrations:
            possible_target = self.registrations[target_check[ATTR_TARGET]]
            key = possible_target["subscription"]["keys"]["auth"]
            key = possible_target[ATTR_SUBSCRIPTION][ATTR_KEYS][ATTR_AUTH]
            with suppress(jwt.exceptions.DecodeError):
                return jwt.decode(token, key, algorithms=["ES256", "HS256"])

@@ -348,9 +337,7 @@ class HTML5PushCallbackView(HomeAssistantView):

    # The following is based on code from Auth0
    # https://auth0.com/docs/quickstart/backend/python
    def check_authorization_header(
        self, request: web.Request
    ) -> web.Response | dict[str, Any]:
    def check_authorization_header(self, request):
        """Check the authorization header."""
        if not (auth := request.headers.get(AUTHORIZATION)):
            return self.json_message(
@@ -379,18 +366,18 @@ class HTML5PushCallbackView(HomeAssistantView):
            )
        return payload

    async def post(self, request: web.Request) -> web.Response:
    async def post(self, request):
        """Accept the POST request for push registrations event callback."""
        auth_check = self.check_authorization_header(request)
        if not isinstance(auth_check, dict):
            return auth_check

        try:
            data: dict[str, str] = await request.json()
            data = await request.json()
        except ValueError:
            return self.json_message("Invalid JSON", HTTPStatus.BAD_REQUEST)

        event_payload: dict[str, Any] = {
        event_payload = {
            ATTR_TAG: data.get(ATTR_TAG),
            ATTR_TYPE: data[ATTR_TYPE],
            ATTR_TARGET: auth_check[ATTR_TARGET],
@@ -418,17 +405,8 @@ class HTML5PushCallbackView(HomeAssistantView):
class HTML5NotificationService(BaseNotificationService):
    """Implement the notification service for HTML5."""

    def __init__(
        self,
        hass: HomeAssistant,
        session: ClientSession,
        vapid_prv: str,
        vapid_email: str,
        registrations: dict[str, Registration],
        json_path: str,
    ) -> None:
    def __init__(self, hass, vapid_prv, vapid_email, registrations, json_path):
        """Initialize the service."""
        self.session = session
        self._vapid_prv = vapid_prv
        self._vapid_email = vapid_email
        self.registrations = registrations
@@ -436,7 +414,7 @@ class HTML5NotificationService(BaseNotificationService):

        async def async_dismiss_message(service: ServiceCall) -> None:
            """Handle dismissing notification message service calls."""
            kwargs: dict[str, Any] = {}
            kwargs = {}

            if self.targets is not None:
                kwargs[ATTR_TARGET] = self.targets
@@ -455,25 +433,29 @@ class HTML5NotificationService(BaseNotificationService):
        )

    @property
    def targets(self) -> dict[str, str]:
    def targets(self):
        """Return a dictionary of registered targets."""
        return {registration: registration for registration in self.registrations}

    async def async_dismiss(self, **kwargs: Any) -> None:
    def dismiss(self, **kwargs):
        """Dismisses a notification."""
        data = kwargs.get(ATTR_DATA)
        tag = data.get(ATTR_TAG) if data else ""
        payload = {ATTR_TAG: tag, ATTR_DISMISS: True, ATTR_DATA: {}}

        self._push_message(payload, **kwargs)

    async def async_dismiss(self, **kwargs):
        """Dismisses a notification.

        This method must be run in the event loop.
        """
        data: dict[str, Any] | None = kwargs.get(ATTR_DATA)
        tag: str = data.get(ATTR_TAG, "") if data else ""
        payload = {ATTR_TAG: tag, ATTR_DISMISS: True, ATTR_DATA: {}}
        await self.hass.async_add_executor_job(partial(self.dismiss, **kwargs))

        await self._push_message(payload, **kwargs)

    async def async_send_message(self, message: str = "", **kwargs: Any) -> None:
    def send_message(self, message: str = "", **kwargs: Any) -> None:
        """Send a message to a user."""
        tag = str(uuid.uuid4())
        payload: dict[str, Any] = {
        payload = {
            "badge": "/static/images/notification-badge.png",
            "body": message,
            ATTR_DATA: {},
@@ -481,12 +463,12 @@ class HTML5NotificationService(BaseNotificationService):
            ATTR_TAG: tag,
            ATTR_TITLE: kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT),
        }
        data: dict[str, Any] | None = kwargs.get(ATTR_DATA)
        if data:

        if data := kwargs.get(ATTR_DATA):
            # Pick out fields that should go into the notification directly vs
            # into the notification data dictionary.

            data_tmp: dict[str, Any] = {}
            data_tmp = {}

            for key, val in data.items():
                if key in HTML5_SHOWNOTIFICATION_PARAMETERS:
@@ -502,14 +484,14 @@ class HTML5NotificationService(BaseNotificationService):
        ):
            payload[ATTR_DATA][ATTR_URL] = URL_ROOT

        await self._push_message(payload, **kwargs)
        self._push_message(payload, **kwargs)

    async def _push_message(self, payload: dict[str, Any], **kwargs: Any) -> None:
    def _push_message(self, payload, **kwargs):
        """Send the message."""

        timestamp = int(time.time())
        ttl = int(kwargs.get(ATTR_TTL, DEFAULT_TTL))
        priority: str = kwargs.get(ATTR_PRIORITY, DEFAULT_PRIORITY)
        priority = kwargs.get(ATTR_PRIORITY, DEFAULT_PRIORITY)
        if priority not in ["normal", "high"]:
            priority = DEFAULT_PRIORITY
        payload["timestamp"] = timestamp * 1000  # Javascript ms since epoch
@@ -520,25 +502,22 @@ class HTML5NotificationService(BaseNotificationService):
        for target in list(targets):
            info = self.registrations.get(target)
            try:
                info = cast(Registration, REGISTER_SCHEMA(info))
                info = REGISTER_SCHEMA(info)
            except vol.Invalid:
                _LOGGER.error(
                    "%s is not a valid HTML5 push notification target", target
                )
                continue
            subscription = info["subscription"]
            subscription = info[ATTR_SUBSCRIPTION]
            payload[ATTR_DATA][ATTR_JWT] = add_jwt(
                timestamp,
                target,
                payload[ATTR_TAG],
                subscription["keys"]["auth"],
                subscription[ATTR_KEYS][ATTR_AUTH],
            )
            webpusher = WebPusher(info[ATTR_SUBSCRIPTION])

            webpusher = WebPusher(
                cast(dict[str, Any], info["subscription"]), aiohttp_session=self.session
            )

            endpoint = urlparse(subscription["endpoint"])
            endpoint = urlparse(subscription[ATTR_ENDPOINT])
            vapid_claims = {
                "sub": f"mailto:{self._vapid_email}",
                "aud": f"{endpoint.scheme}://{endpoint.netloc}",
@@ -546,35 +525,29 @@ class HTML5NotificationService(BaseNotificationService):
            }
            vapid_headers = Vapid.from_string(self._vapid_prv).sign(vapid_claims)
            vapid_headers.update({"urgency": priority, "priority": priority})

            response = await webpusher.send_async(
            response = webpusher.send(
                data=json.dumps(payload), headers=vapid_headers, ttl=ttl
            )

            if TYPE_CHECKING:
                assert not isinstance(response, str)

            if response.status == HTTPStatus.GONE:
            if response.status_code == 410:
                _LOGGER.info("Notification channel has expired")
                reg = self.registrations.pop(target)
                try:
                    await self.hass.async_add_executor_job(
                        save_json, self.registrations_json_path, self.registrations
                    )
                    save_json(self.registrations_json_path, self.registrations)
                except HomeAssistantError:
                    self.registrations[target] = reg
                    _LOGGER.error("Error saving registration")
                else:
                    _LOGGER.info("Configuration saved")
            elif response.status >= HTTPStatus.BAD_REQUEST:
            elif response.status_code > 399:
                _LOGGER.error(
                    "There was an issue sending the notification %s: %s",
                    response.status,
                    await response.text(),
                    response.status_code,
                    response.text,
                )


def add_jwt(timestamp: int, target: str, tag: str, jwt_secret: str) -> str:
def add_jwt(timestamp, target, tag, jwt_secret):
    """Create JWT json to put into payload."""

    jwt_exp = datetime.fromtimestamp(timestamp) + timedelta(days=JWT_VALID_DAYS)

@@ -7,16 +7,11 @@ incorrect behavior, and are thus not wanted in the demo integration.

from __future__ import annotations

import datetime
from functools import partial
from random import random

import voluptuous as vol

from homeassistant.components.labs import (
    EventLabsUpdatedData,
    async_is_preview_feature_enabled,
    async_subscribe_preview_feature,
)
from homeassistant.components.labs import async_is_preview_feature_enabled, async_listen
from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, get_instance
from homeassistant.components.recorder.models import (
    StatisticData,
@@ -133,16 +128,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

    # Subscribe to labs feature updates for kitchen_sink preview repair
    entry.async_on_unload(
        async_subscribe_preview_feature(
        async_listen(
            hass,
            domain=DOMAIN,
            preview_feature="special_repair",
            listener=partial(_async_update_special_repair, hass),
            listener=lambda: _async_update_special_repair(hass),
        )
    )

    # Check if lab feature is currently enabled and create repair if so
    await _async_update_special_repair(hass)
    _async_update_special_repair(hass)

    return True

@@ -171,22 +166,15 @@ async def async_remove_config_entry_device(
    return True


async def _async_update_special_repair(
    hass: HomeAssistant,
    event_data: EventLabsUpdatedData | None = None,
) -> None:
@callback
def _async_update_special_repair(hass: HomeAssistant) -> None:
    """Create or delete the special repair issue.

    Creates a repair issue when the special_repair lab feature is enabled,
    and deletes it when disabled. This demonstrates how lab features can interact
    with Home Assistant's repair system.
    """
    enabled = (
        event_data["enabled"]
        if event_data is not None
        else async_is_preview_feature_enabled(hass, DOMAIN, "special_repair")
    )
    if enabled:
    if async_is_preview_feature_enabled(hass, DOMAIN, "special_repair"):
        async_create_issue(
            hass,
            DOMAIN,

@@ -114,26 +114,24 @@ class KnxYamlBinarySensor(_KnxBinarySensor, KnxYamlEntity):

    def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
        """Initialize of KNX binary sensor."""
        self._device = XknxBinarySensor(
            xknx=knx_module.xknx,
            name=config[CONF_NAME],
            group_address_state=config[CONF_STATE_ADDRESS],
            invert=config[CONF_INVERT],
            sync_state=config[CONF_SYNC_STATE],
            ignore_internal_state=config[CONF_IGNORE_INTERNAL_STATE],
            context_timeout=config.get(CONF_CONTEXT_TIMEOUT),
            reset_after=config.get(CONF_RESET_AFTER),
            always_callback=True,
        )
        super().__init__(
            knx_module=knx_module,
            unique_id=str(self._device.remote_value.group_address_state),
            name=config[CONF_NAME],
            entity_category=config.get(CONF_ENTITY_CATEGORY),
            device=XknxBinarySensor(
                xknx=knx_module.xknx,
                name=config[CONF_NAME],
                group_address_state=config[CONF_STATE_ADDRESS],
                invert=config[CONF_INVERT],
                sync_state=config[CONF_SYNC_STATE],
                ignore_internal_state=config[CONF_IGNORE_INTERNAL_STATE],
                context_timeout=config.get(CONF_CONTEXT_TIMEOUT),
                reset_after=config.get(CONF_RESET_AFTER),
                always_callback=True,
            ),
        )

        self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
        self._attr_device_class = config.get(CONF_DEVICE_CLASS)
        self._attr_force_update = self._device.ignore_internal_state
        self._attr_unique_id = str(self._device.remote_value.group_address_state)


class KnxUiBinarySensor(_KnxBinarySensor, KnxUiEntity):

@@ -35,18 +35,19 @@ class KNXButton(KnxYamlEntity, ButtonEntity):

    def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
        """Initialize a KNX button."""
        self._device = XknxRawValue(
            xknx=knx_module.xknx,
            name=config[CONF_NAME],
            payload_length=config[CONF_PAYLOAD_LENGTH],
            group_address=config[KNX_ADDRESS],
        )
        self._payload = config[CONF_PAYLOAD]
        super().__init__(
            knx_module=knx_module,
            unique_id=f"{self._device.remote_value.group_address}_{self._payload}",
            name=config[CONF_NAME],
            entity_category=config.get(CONF_ENTITY_CATEGORY),
            device=XknxRawValue(
                xknx=knx_module.xknx,
                name=config[CONF_NAME],
                payload_length=config[CONF_PAYLOAD_LENGTH],
                group_address=config[KNX_ADDRESS],
            ),
        )
        self._payload = config[CONF_PAYLOAD]
        self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
        self._attr_unique_id = (
            f"{self._device.remote_value.group_address}_{self._payload}"
        )

    async def async_press(self) -> None:

@@ -119,7 +119,7 @@ async def async_setup_entry(
    async_add_entities(entities)


def _create_climate_yaml(xknx: XKNX, config: ConfigType) -> XknxClimate:
def _create_climate(xknx: XKNX, config: ConfigType) -> XknxClimate:
    """Return a KNX Climate device to be used within XKNX."""
    climate_mode = XknxClimateMode(
        xknx,
@@ -646,17 +646,9 @@ class KnxYamlClimate(_KnxClimate, KnxYamlEntity):

    def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
        """Initialize of a KNX climate device."""
        self._device = _create_climate_yaml(knx_module.xknx, config)
        super().__init__(
            knx_module=knx_module,
            unique_id=(
                f"{self._device.temperature.group_address_state}_"
                f"{self._device.target_temperature.group_address_state}_"
                f"{self._device.target_temperature.group_address}_"
                f"{self._device._setpoint_shift.group_address}"  # noqa: SLF001
            ),
            name=config[CONF_NAME],
            entity_category=config.get(CONF_ENTITY_CATEGORY),
            device=_create_climate(knx_module.xknx, config),
        )
        default_hvac_mode: HVACMode = config[ClimateConf.DEFAULT_CONTROLLER_MODE]
        fan_max_step = config[ClimateConf.FAN_MAX_STEP]
@@ -668,6 +660,14 @@ class KnxYamlClimate(_KnxClimate, KnxYamlEntity):
            fan_zero_mode=fan_zero_mode,
        )

        self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
        self._attr_unique_id = (
            f"{self._device.temperature.group_address_state}_"
            f"{self._device.target_temperature.group_address_state}_"
            f"{self._device.target_temperature.group_address}_"
            f"{self._device._setpoint_shift.group_address}"  # noqa: SLF001
        )


class KnxUiClimate(_KnxClimate, KnxUiEntity):
    """Representation of a KNX climate device configured from the UI."""

@@ -191,34 +191,36 @@ class KnxYamlCover(_KnxCover, KnxYamlEntity):

    def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
        """Initialize the cover."""
        self._device = XknxCover(
            xknx=knx_module.xknx,
            name=config[CONF_NAME],
            group_address_long=config.get(CoverSchema.CONF_MOVE_LONG_ADDRESS),
            group_address_short=config.get(CoverSchema.CONF_MOVE_SHORT_ADDRESS),
            group_address_stop=config.get(CoverSchema.CONF_STOP_ADDRESS),
            group_address_position_state=config.get(
                CoverSchema.CONF_POSITION_STATE_ADDRESS
            ),
            group_address_angle=config.get(CoverSchema.CONF_ANGLE_ADDRESS),
            group_address_angle_state=config.get(CoverSchema.CONF_ANGLE_STATE_ADDRESS),
            group_address_position=config.get(CoverSchema.CONF_POSITION_ADDRESS),
            travel_time_down=config[CoverConf.TRAVELLING_TIME_DOWN],
            travel_time_up=config[CoverConf.TRAVELLING_TIME_UP],
            invert_updown=config[CoverConf.INVERT_UPDOWN],
            invert_position=config[CoverConf.INVERT_POSITION],
            invert_angle=config[CoverConf.INVERT_ANGLE],
        )
        super().__init__(
            knx_module=knx_module,
            unique_id=(
                f"{self._device.updown.group_address}_"
                f"{self._device.position_target.group_address}"
            device=XknxCover(
                xknx=knx_module.xknx,
                name=config[CONF_NAME],
                group_address_long=config.get(CoverSchema.CONF_MOVE_LONG_ADDRESS),
                group_address_short=config.get(CoverSchema.CONF_MOVE_SHORT_ADDRESS),
                group_address_stop=config.get(CoverSchema.CONF_STOP_ADDRESS),
                group_address_position_state=config.get(
                    CoverSchema.CONF_POSITION_STATE_ADDRESS
                ),
                group_address_angle=config.get(CoverSchema.CONF_ANGLE_ADDRESS),
                group_address_angle_state=config.get(
                    CoverSchema.CONF_ANGLE_STATE_ADDRESS
                ),
                group_address_position=config.get(CoverSchema.CONF_POSITION_ADDRESS),
                travel_time_down=config[CoverConf.TRAVELLING_TIME_DOWN],
                travel_time_up=config[CoverConf.TRAVELLING_TIME_UP],
                invert_updown=config[CoverConf.INVERT_UPDOWN],
                invert_position=config[CoverConf.INVERT_POSITION],
                invert_angle=config[CoverConf.INVERT_ANGLE],
            ),
            name=config[CONF_NAME],
            entity_category=config.get(CONF_ENTITY_CATEGORY),
        )
        self.init_base()

        self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
        self._attr_unique_id = (
            f"{self._device.updown.group_address}_"
            f"{self._device.position_target.group_address}"
        )
        if custom_device_class := config.get(CONF_DEVICE_CLASS):
            self._attr_device_class = custom_device_class

@@ -105,21 +105,20 @@ class KnxYamlDate(_KNXDate, KnxYamlEntity):

    def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
        """Initialize a KNX date."""
        self._device = XknxDateDevice(
            knx_module.xknx,
            name=config[CONF_NAME],
            localtime=False,
            group_address=config[KNX_ADDRESS],
            group_address_state=config.get(CONF_STATE_ADDRESS),
            respond_to_read=config[CONF_RESPOND_TO_READ],
            sync_state=config[CONF_SYNC_STATE],
        )
        super().__init__(
            knx_module=knx_module,
            unique_id=str(self._device.remote_value.group_address),
            name=config[CONF_NAME],
            entity_category=config.get(CONF_ENTITY_CATEGORY),
            device=XknxDateDevice(
                knx_module.xknx,
                name=config[CONF_NAME],
                localtime=False,
                group_address=config[KNX_ADDRESS],
                group_address_state=config.get(CONF_STATE_ADDRESS),
                respond_to_read=config[CONF_RESPOND_TO_READ],
                sync_state=config[CONF_SYNC_STATE],
            ),
        )
        self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
        self._attr_unique_id = str(self._device.remote_value.group_address)


class KnxUiDate(_KNXDate, KnxUiEntity):

@@ -110,21 +110,20 @@ class KnxYamlDateTime(_KNXDateTime, KnxYamlEntity):

    def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
        """Initialize a KNX datetime."""
        self._device = XknxDateTimeDevice(
            knx_module.xknx,
            name=config[CONF_NAME],
            localtime=False,
            group_address=config[KNX_ADDRESS],
            group_address_state=config.get(CONF_STATE_ADDRESS),
            respond_to_read=config[CONF_RESPOND_TO_READ],
            sync_state=config[CONF_SYNC_STATE],
        )
        super().__init__(
            knx_module=knx_module,
            unique_id=str(self._device.remote_value.group_address),
            name=config[CONF_NAME],
            entity_category=config.get(CONF_ENTITY_CATEGORY),
            device=XknxDateTimeDevice(
                knx_module.xknx,
                name=config[CONF_NAME],
                localtime=False,
                group_address=config[KNX_ADDRESS],
                group_address_state=config.get(CONF_STATE_ADDRESS),
                respond_to_read=config[CONF_RESPOND_TO_READ],
                sync_state=config[CONF_SYNC_STATE],
            ),
        )
        self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
        self._attr_unique_id = str(self._device.remote_value.group_address)


class KnxUiDateTime(_KNXDateTime, KnxUiEntity):

@@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, Any

from xknx.devices import Device as XknxDevice

from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, EntityCategory
from homeassistant.const import CONF_ENTITY_CATEGORY, EntityCategory
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_platform import EntityPlatform
@@ -52,11 +52,14 @@ class _KnxEntityBase(Entity):
    """Representation of a KNX entity."""

    _attr_should_poll = False

    _attr_unique_id: str
    _knx_module: KNXModule
    _device: XknxDevice

    @property
    def name(self) -> str:
        """Return the name of the KNX device."""
        return self._device.name

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
@@ -97,23 +100,16 @@ class _KnxEntityBase(Entity):
class KnxYamlEntity(_KnxEntityBase):
    """Representation of a KNX entity configured from YAML."""

    def __init__(
        self,
        knx_module: KNXModule,
        unique_id: str,
        name: str,
        entity_category: EntityCategory | None,
    ) -> None:
    def __init__(self, knx_module: KNXModule, device: XknxDevice) -> None:
        """Initialize the YAML entity."""
        self._knx_module = knx_module
        self._attr_name = name or None
        self._attr_unique_id = unique_id
        self._attr_entity_category = entity_category
        self._device = device


class KnxUiEntity(_KnxEntityBase):
    """Representation of a KNX UI entity."""

    _attr_unique_id: str
    _attr_has_entity_name = True

    def __init__(
@@ -121,8 +117,6 @@ class KnxUiEntity(_KnxEntityBase):
    ) -> None:
        """Initialize the UI entity."""
        self._knx_module = knx_module

        self._attr_name = entity_config[CONF_NAME]
        self._attr_unique_id = unique_id
        if entity_category := entity_config.get(CONF_ENTITY_CATEGORY):
            self._attr_entity_category = EntityCategory(entity_category)

@@ -208,32 +208,35 @@ class KnxYamlFan(_KnxFan, KnxYamlEntity):
    def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
        """Initialize of KNX fan."""
        max_step = config.get(FanConf.MAX_STEP)
        self._device = XknxFan(
            xknx=knx_module.xknx,
            name=config[CONF_NAME],
            group_address_speed=config.get(KNX_ADDRESS),
            group_address_speed_state=config.get(FanSchema.CONF_STATE_ADDRESS),
            group_address_oscillation=config.get(FanSchema.CONF_OSCILLATION_ADDRESS),
            group_address_oscillation_state=config.get(
                FanSchema.CONF_OSCILLATION_STATE_ADDRESS
            ),
            group_address_switch=config.get(FanSchema.CONF_SWITCH_ADDRESS),
            group_address_switch_state=config.get(FanSchema.CONF_SWITCH_STATE_ADDRESS),
            max_step=max_step,
            sync_state=config.get(CONF_SYNC_STATE, True),
        )
        super().__init__(
            knx_module=knx_module,
            unique_id=(
                str(self._device.speed.group_address)
                if self._device.speed.group_address
                else str(self._device.switch.group_address)
            device=XknxFan(
                xknx=knx_module.xknx,
                name=config[CONF_NAME],
                group_address_speed=config.get(KNX_ADDRESS),
                group_address_speed_state=config.get(FanSchema.CONF_STATE_ADDRESS),
                group_address_oscillation=config.get(
                    FanSchema.CONF_OSCILLATION_ADDRESS
                ),
                group_address_oscillation_state=config.get(
                    FanSchema.CONF_OSCILLATION_STATE_ADDRESS
                ),
                group_address_switch=config.get(FanSchema.CONF_SWITCH_ADDRESS),
                group_address_switch_state=config.get(
                    FanSchema.CONF_SWITCH_STATE_ADDRESS
                ),
                max_step=max_step,
                sync_state=config.get(CONF_SYNC_STATE, True),
            ),
            name=config[CONF_NAME],
            entity_category=config.get(CONF_ENTITY_CATEGORY),
        )
        # FanSpeedMode.STEP if max_step is set
        self._step_range: tuple[int, int] | None = (1, max_step) if max_step else None
        self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)

        if self._device.speed.group_address:
            self._attr_unique_id = str(self._device.speed.group_address)
        else:
            self._attr_unique_id = str(self._device.switch.group_address)


class KnxUiFan(_KnxFan, KnxUiEntity):

@@ -558,16 +558,15 @@ class KnxYamlLight(_KnxLight, KnxYamlEntity):

    def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
        """Initialize of KNX light."""
        self._device = _create_yaml_light(knx_module.xknx, config)
        super().__init__(
            knx_module=knx_module,
            unique_id=self._device_unique_id(),
            name=config[CONF_NAME],
            entity_category=config.get(CONF_ENTITY_CATEGORY),
            device=_create_yaml_light(knx_module.xknx, config),
        )
        self._attr_color_mode = next(iter(self.supported_color_modes))
        self._attr_max_color_temp_kelvin: int = config[LightSchema.CONF_MAX_KELVIN]
        self._attr_min_color_temp_kelvin: int = config[LightSchema.CONF_MIN_KELVIN]
        self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
        self._attr_unique_id = self._device_unique_id()

    def _device_unique_id(self) -> str:
        """Return unique id for this device."""

@@ -11,7 +11,7 @@
|
||||
"loggers": ["xknx", "xknxproject"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"xknx==3.15.0",
|
||||
"xknx==3.14.0",
|
||||
"xknxproject==3.8.2",
|
||||
"knx-frontend==2026.2.13.222258"
|
||||
],
|
||||
|
||||
@@ -46,13 +46,12 @@ class KNXNotify(KnxYamlEntity, NotifyEntity):
|
||||
|
||||
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
|
||||
"""Initialize a KNX notification."""
|
||||
self._device = _create_notification_instance(knx_module.xknx, config)
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
unique_id=str(self._device.remote_value.group_address),
|
||||
name=config[CONF_NAME],
|
||||
entity_category=config.get(CONF_ENTITY_CATEGORY),
|
||||
device=_create_notification_instance(knx_module.xknx, config),
|
||||
)
|
||||
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
|
||||
self._attr_unique_id = str(self._device.remote_value.group_address)
|
||||
|
||||
async def async_send_message(self, message: str, title: str | None = None) -> None:
|
||||
"""Send a notification to knx bus."""
|
||||
|
||||
@@ -109,19 +109,16 @@ class KnxYamlNumber(_KnxNumber, KnxYamlEntity):

def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX number."""
self._device = NumericValue(
knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
value_type=config[CONF_TYPE],
)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.sensor_value.group_address),
name=config[CONF_NAME],
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=NumericValue(
knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
value_type=config[CONF_TYPE],
),
)
self._attr_native_max_value = config.get(
NumberConf.MAX,
@@ -136,6 +133,8 @@ class KnxYamlNumber(_KnxNumber, KnxYamlEntity):
NumberConf.STEP,
self._device.sensor_value.dpt_class.resolution,
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.sensor_value.group_address)
self._attr_native_unit_of_measurement = self._device.unit_of_measurement()
self._device.sensor_value.value = max(0, self._attr_native_min_value)


@@ -83,19 +83,18 @@ class KnxYamlScene(_KnxScene, KnxYamlEntity):

def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize KNX scene."""
self._device = XknxScene(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
scene_number=config[SceneSchema.CONF_SCENE_NUMBER],
)
super().__init__(
knx_module=knx_module,
unique_id=(
f"{self._device.scene_value.group_address}_{self._device.scene_number}"
device=XknxScene(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
scene_number=config[SceneSchema.CONF_SCENE_NUMBER],
),
name=config[CONF_NAME],
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = (
f"{self._device.scene_value.group_address}_{self._device.scene_number}"
)



@@ -22,7 +22,7 @@ from homeassistant.components.cover import (
)
from homeassistant.components.number import NumberMode
from homeassistant.components.sensor import (
CONF_STATE_CLASS as CONF_SENSOR_STATE_CLASS,
CONF_STATE_CLASS,
DEVICE_CLASSES_SCHEMA as SENSOR_DEVICE_CLASSES_SCHEMA,
STATE_CLASSES_SCHEMA,
)
@@ -64,7 +64,6 @@ from .const import (
NumberConf,
SceneConf,
)
from .dpt import get_supported_dpts
from .validation import (
backwards_compatible_xknx_climate_enum_member,
dpt_base_type_validator,
@@ -75,7 +74,6 @@ from .validation import (
string_type_validator,
sync_state_validator,
validate_number_attributes,
validate_sensor_attributes,
)


@@ -145,13 +143,6 @@ def select_options_sub_validator(entity_config: OrderedDict) -> OrderedDict:
return entity_config


def _sensor_attribute_sub_validator(config: dict) -> dict:
"""Validate that state_class is compatible with device_class and unit_of_measurement."""
transcoder: type[DPTBase] = DPTBase.parse_transcoder(config[CONF_TYPE]) # type: ignore[assignment] # already checked in sensor_type_validator
dpt_metadata = get_supported_dpts()[transcoder.dpt_number_str()]
return validate_sensor_attributes(dpt_metadata, config)


#########
# EVENT
#########
@@ -195,22 +186,16 @@ class KNXPlatformSchema(ABC):
}


COMMON_ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=""): cv.string,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)


class BinarySensorSchema(KNXPlatformSchema):
"""Voluptuous schema for KNX binary sensors."""

PLATFORM = Platform.BINARY_SENSOR
DEFAULT_NAME = "KNX Binary Sensor"

ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Optional(CONF_IGNORE_INTERNAL_STATE, default=False): cv.boolean,
vol.Optional(CONF_INVERT, default=False): cv.boolean,
@@ -220,6 +205,7 @@ class BinarySensorSchema(KNXPlatformSchema):
),
vol.Optional(CONF_DEVICE_CLASS): BINARY_SENSOR_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_RESET_AFTER): cv.positive_float,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
)
@@ -231,6 +217,7 @@ class ButtonSchema(KNXPlatformSchema):
PLATFORM = Platform.BUTTON

CONF_VALUE = "value"
DEFAULT_NAME = "KNX Button"

payload_or_value_msg = f"Please use only one of `{CONF_PAYLOAD}` or `{CONF_VALUE}`"
length_or_type_msg = (
@@ -238,8 +225,9 @@ class ButtonSchema(KNXPlatformSchema):
)

ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(KNX_ADDRESS): ga_validator,
vol.Exclusive(
CONF_PAYLOAD, "payload_or_value", msg=payload_or_value_msg
@@ -253,6 +241,7 @@ class ButtonSchema(KNXPlatformSchema):
vol.Exclusive(
CONF_TYPE, "length_or_type", msg=length_or_type_msg
): object,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
vol.Any(
@@ -320,6 +309,7 @@ class ClimateSchema(KNXPlatformSchema):
CONF_SWING_HORIZONTAL_ADDRESS = "swing_horizontal_address"
CONF_SWING_HORIZONTAL_STATE_ADDRESS = "swing_horizontal_state_address"

DEFAULT_NAME = "KNX Climate"
DEFAULT_SETPOINT_SHIFT_MODE = "DPT6010"
DEFAULT_SETPOINT_SHIFT_MAX = 6
DEFAULT_SETPOINT_SHIFT_MIN = -6
@@ -328,8 +318,9 @@ class ClimateSchema(KNXPlatformSchema):
DEFAULT_FAN_SPEED_MODE = "percent"

ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(
ClimateConf.SETPOINT_SHIFT_MAX, default=DEFAULT_SETPOINT_SHIFT_MAX
): vol.All(int, vol.Range(min=0, max=32)),
@@ -429,10 +420,12 @@ class CoverSchema(KNXPlatformSchema):
CONF_ANGLE_STATE_ADDRESS = "angle_state_address"

DEFAULT_TRAVEL_TIME = 25
DEFAULT_NAME = "KNX Cover"

ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MOVE_LONG_ADDRESS): ga_list_validator,
vol.Optional(CONF_MOVE_SHORT_ADDRESS): ga_list_validator,
vol.Optional(CONF_STOP_ADDRESS): ga_list_validator,
@@ -450,6 +443,7 @@ class CoverSchema(KNXPlatformSchema):
vol.Optional(CoverConf.INVERT_POSITION, default=False): cv.boolean,
vol.Optional(CoverConf.INVERT_ANGLE, default=False): cv.boolean,
vol.Optional(CONF_DEVICE_CLASS): COVER_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
vol.Any(
@@ -474,12 +468,16 @@ class DateSchema(KNXPlatformSchema):

PLATFORM = Platform.DATE

ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX Date"

ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)

@@ -489,12 +487,16 @@ class DateTimeSchema(KNXPlatformSchema):

PLATFORM = Platform.DATETIME

ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX DateTime"

ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)

@@ -556,9 +558,12 @@ class FanSchema(KNXPlatformSchema):
CONF_SWITCH_ADDRESS = "switch_address"
CONF_SWITCH_STATE_ADDRESS = "switch_state_address"

DEFAULT_NAME = "KNX Fan"

ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_SWITCH_ADDRESS): ga_list_validator,
@@ -566,6 +571,7 @@ class FanSchema(KNXPlatformSchema):
vol.Optional(CONF_OSCILLATION_ADDRESS): ga_list_validator,
vol.Optional(CONF_OSCILLATION_STATE_ADDRESS): ga_list_validator,
vol.Optional(FanConf.MAX_STEP): cv.byte,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
}
),
@@ -610,6 +616,7 @@ class LightSchema(KNXPlatformSchema):
CONF_MIN_KELVIN = "min_kelvin"
CONF_MAX_KELVIN = "max_kelvin"

DEFAULT_NAME = "KNX Light"
DEFAULT_COLOR_TEMP_MODE = "absolute"
DEFAULT_MIN_KELVIN = 2700 # 370 mireds
DEFAULT_MAX_KELVIN = 6000 # 166 mireds
@@ -641,8 +648,9 @@ class LightSchema(KNXPlatformSchema):
)

ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_BRIGHTNESS_ADDRESS): ga_list_validator,
@@ -692,6 +700,7 @@ class LightSchema(KNXPlatformSchema):
vol.Optional(CONF_MAX_KELVIN, default=DEFAULT_MAX_KELVIN): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
vol.Any(
@@ -737,10 +746,14 @@ class NotifySchema(KNXPlatformSchema):

PLATFORM = Platform.NOTIFY

ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX Notify"

ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_TYPE, default="latin_1"): string_type_validator,
vol.Required(KNX_ADDRESS): ga_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)

@@ -749,10 +762,12 @@ class NumberSchema(KNXPlatformSchema):
"""Voluptuous schema for KNX numbers."""

PLATFORM = Platform.NUMBER
DEFAULT_NAME = "KNX Number"

ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_MODE, default=NumberMode.AUTO): vol.Coerce(
NumberMode
@@ -777,12 +792,15 @@ class SceneSchema(KNXPlatformSchema):

CONF_SCENE_NUMBER = "scene_number"

ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX SCENE"
ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Required(SceneConf.SCENE_NUMBER): vol.All(
vol.Coerce(int), vol.Range(min=1, max=64)
),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)

@@ -794,10 +812,12 @@ class SelectSchema(KNXPlatformSchema):

CONF_OPTION = "option"
CONF_OPTIONS = "options"
DEFAULT_NAME = "KNX Select"

ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Required(CONF_PAYLOAD_LENGTH): vol.All(
@@ -811,6 +831,7 @@ class SelectSchema(KNXPlatformSchema):
],
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
select_options_sub_validator,
@@ -825,19 +846,19 @@ class SensorSchema(KNXPlatformSchema):
CONF_ALWAYS_CALLBACK = "always_callback"
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
CONF_SYNC_STATE = CONF_SYNC_STATE
DEFAULT_NAME = "KNX Sensor"

ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Optional(CONF_ALWAYS_CALLBACK, default=False): cv.boolean,
vol.Optional(CONF_SENSOR_STATE_CLASS): STATE_CLASSES_SCHEMA,
vol.Required(CONF_TYPE): sensor_type_validator,
vol.Required(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_DEVICE_CLASS): SENSOR_DEVICE_CLASSES_SCHEMA,
}
),
_sensor_attribute_sub_validator,
ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Optional(CONF_ALWAYS_CALLBACK, default=False): cv.boolean,
vol.Optional(CONF_STATE_CLASS): STATE_CLASSES_SCHEMA,
vol.Required(CONF_TYPE): sensor_type_validator,
vol.Required(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_DEVICE_CLASS): SENSOR_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)


@@ -849,13 +870,16 @@ class SwitchSchema(KNXPlatformSchema):
CONF_INVERT = CONF_INVERT
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS

ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX Switch"
ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_INVERT, default=False): cv.boolean,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_DEVICE_CLASS): SWITCH_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)

@@ -865,13 +889,17 @@ class TextSchema(KNXPlatformSchema):

PLATFORM = Platform.TEXT

ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX Text"

ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_TYPE, default="latin_1"): string_type_validator,
vol.Optional(CONF_MODE, default=TextMode.TEXT): vol.Coerce(TextMode),
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)

@@ -881,12 +909,16 @@ class TimeSchema(KNXPlatformSchema):

PLATFORM = Platform.TIME

ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX Time"

ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)

@@ -911,21 +943,27 @@ class WeatherSchema(KNXPlatformSchema):
CONF_KNX_AIR_PRESSURE_ADDRESS = "address_air_pressure"
CONF_KNX_HUMIDITY_ADDRESS = "address_humidity"

ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(CONF_KNX_TEMPERATURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_SOUTH_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_EAST_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_WEST_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_NORTH_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_SPEED_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_BEARING_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_RAIN_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_FROST_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_DAY_NIGHT_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_AIR_PRESSURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_HUMIDITY_ADDRESS): ga_list_validator,
}
DEFAULT_NAME = "KNX Weather Station"

ENTITY_SCHEMA = vol.All(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(CONF_KNX_TEMPERATURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_SOUTH_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_EAST_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_WEST_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_NORTH_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_SPEED_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_BEARING_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_RAIN_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_FROST_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_DAY_NIGHT_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_AIR_PRESSURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_HUMIDITY_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
)

@@ -65,12 +65,9 @@ class KNXSelect(KnxYamlEntity, SelectEntity, RestoreEntity):

def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX select."""
self._device = _create_raw_value(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.remote_value.group_address),
name=config[CONF_NAME],
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=_create_raw_value(knx_module.xknx, config),
)
self._option_payloads: dict[str, int] = {
option[SelectSchema.CONF_OPTION]: option[CONF_PAYLOAD]
@@ -78,6 +75,8 @@ class KNXSelect(KnxYamlEntity, SelectEntity, RestoreEntity):
}
self._attr_options = list(self._option_payloads)
self._attr_current_option = None
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)

async def async_added_to_hass(self) -> None:
"""Restore last state."""

@@ -202,34 +202,29 @@ class KnxYamlSensor(_KnxSensor, KnxYamlEntity):

def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of a KNX sensor."""
self._device = XknxSensor(
knx_module.xknx,
name=config[CONF_NAME],
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
sync_state=config[CONF_SYNC_STATE],
always_callback=True,
value_type=config[CONF_TYPE],
)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.sensor_value.group_address_state),
name=config[CONF_NAME],
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=XknxSensor(
knx_module.xknx,
name=config[CONF_NAME],
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
sync_state=config[CONF_SYNC_STATE],
always_callback=True,
value_type=config[CONF_TYPE],
),
)
dpt_string = self._device.sensor_value.dpt_class.dpt_number_str()
dpt_info = get_supported_dpts()[dpt_string]

if device_class := config.get(CONF_DEVICE_CLASS):
self._attr_device_class = device_class
else:
self._attr_device_class = dpt_info["sensor_device_class"]
self._attr_device_class = try_parse_enum(
SensorDeviceClass, self._device.ha_device_class()
)

self._attr_state_class = (
config.get(CONF_STATE_CLASS) or dpt_info["sensor_state_class"]
)

self._attr_native_unit_of_measurement = dpt_info["unit"]
self._attr_force_update = config[SensorSchema.CONF_ALWAYS_CALLBACK]
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.sensor_value.group_address_state)
self._attr_native_unit_of_measurement = self._device.unit_of_measurement()
self._attr_state_class = config.get(CONF_STATE_CLASS)
self._attr_extra_state_attributes = {}



@@ -13,7 +13,9 @@ from homeassistant.components.number import (
)
from homeassistant.components.sensor import (
CONF_STATE_CLASS as CONF_SENSOR_STATE_CLASS,
DEVICE_CLASS_STATE_CLASSES,
DEVICE_CLASS_UNITS as SENSOR_DEVICE_CLASS_UNITS,
STATE_CLASS_UNITS,
SensorDeviceClass,
SensorStateClass,
)
@@ -50,7 +52,7 @@ from ..const import (
SceneConf,
)
from ..dpt import get_supported_dpts
from ..validation import validate_number_attributes, validate_sensor_attributes
from ..validation import validate_number_attributes
from .const import (
CONF_ALWAYS_CALLBACK,
CONF_COLOR,
@@ -682,11 +684,62 @@ CLIMATE_KNX_SCHEMA = vol.Schema(
)


def _sensor_attribute_sub_validator(config: dict) -> dict:
def _validate_sensor_attributes(config: dict) -> dict:
"""Validate that state_class is compatible with device_class and unit_of_measurement."""
dpt = config[CONF_GA_SENSOR][CONF_DPT]
dpt_metadata = get_supported_dpts()[dpt]
return validate_sensor_attributes(dpt_metadata, config)
state_class = config.get(
CONF_SENSOR_STATE_CLASS,
dpt_metadata["sensor_state_class"],
)
device_class = config.get(
CONF_DEVICE_CLASS,
dpt_metadata["sensor_device_class"],
)
unit_of_measurement = config.get(
CONF_UNIT_OF_MEASUREMENT,
dpt_metadata["unit"],
)
if (
state_class
and device_class
and (state_classes := DEVICE_CLASS_STATE_CLASSES.get(device_class)) is not None
and state_class not in state_classes
):
raise vol.Invalid(
f"State class '{state_class}' is not valid for device class '{device_class}'. "
f"Valid options are: {', '.join(sorted(map(str, state_classes), key=str.casefold))}",
path=[CONF_SENSOR_STATE_CLASS],
)
if (
device_class
and (d_c_units := SENSOR_DEVICE_CLASS_UNITS.get(device_class)) is not None
and unit_of_measurement not in d_c_units
):
raise vol.Invalid(
f"Unit of measurement '{unit_of_measurement}' is not valid for device class '{device_class}'. "
f"Valid options are: {', '.join(sorted(map(str, d_c_units), key=str.casefold))}",
path=(
[CONF_DEVICE_CLASS]
if CONF_DEVICE_CLASS in config
else [CONF_UNIT_OF_MEASUREMENT]
),
)
if (
state_class
and (s_c_units := STATE_CLASS_UNITS.get(state_class)) is not None
and unit_of_measurement not in s_c_units
):
raise vol.Invalid(
f"Unit of measurement '{unit_of_measurement}' is not valid for state class '{state_class}'. "
f"Valid options are: {', '.join(sorted(map(str, s_c_units), key=str.casefold))}",
path=(
[CONF_SENSOR_STATE_CLASS]
if CONF_SENSOR_STATE_CLASS in config
else [CONF_UNIT_OF_MEASUREMENT]
),
)
return config


SENSOR_KNX_SCHEMA = AllSerializeFirst(
@@ -735,7 +788,7 @@ SENSOR_KNX_SCHEMA = AllSerializeFirst(
),
},
),
_sensor_attribute_sub_validator,
_validate_sensor_attributes,
)

KNX_SCHEMA_FOR_PLATFORM = {

@@ -107,21 +107,20 @@ class KnxYamlSwitch(_KnxSwitch, KnxYamlEntity):

def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of KNX switch."""
self._device = XknxSwitch(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(SwitchSchema.CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
invert=config[SwitchSchema.CONF_INVERT],
)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.switch.group_address),
name=config[CONF_NAME],
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=XknxSwitch(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(SwitchSchema.CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
invert=config[SwitchSchema.CONF_INVERT],
),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
self._attr_unique_id = str(self._device.switch.group_address)


class KnxUiSwitch(_KnxSwitch, KnxUiEntity):

@@ -45,7 +45,6 @@ class TelegramDict(DecodedTelegramPayload):
"""Represent a Telegram as a dict."""

# this has to be in sync with the frontend implementation
data_secure: bool | None
destination: str
destination_name: str
direction: str
@@ -154,7 +153,6 @@ class Telegrams:
value = _serializable_decoded_data(telegram.decoded_data.value)

return TelegramDict(
data_secure=telegram.data_secure,
destination=f"{telegram.destination_address}",
destination_name=dst_name,
direction=telegram.direction.value,

@@ -112,21 +112,20 @@ class KnxYamlText(_KnxText, KnxYamlEntity):

def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX text."""
self._device = XknxNotification(
knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
value_type=config[CONF_TYPE],
)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.remote_value.group_address),
name=config[CONF_NAME],
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=XknxNotification(
knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
value_type=config[CONF_TYPE],
),
)
self._attr_mode = config[CONF_MODE]
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)


class KnxUiText(_KnxText, KnxUiEntity):

@@ -105,21 +105,20 @@ class KnxYamlTime(_KNXTime, KnxYamlEntity):

def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX time."""
self._device = XknxTimeDevice(
knx_module.xknx,
name=config[CONF_NAME],
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.remote_value.group_address),
name=config[CONF_NAME],
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=XknxTimeDevice(
knx_module.xknx,
name=config[CONF_NAME],
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)


class KnxUiTime(_KNXTime, KnxUiEntity):

@@ -14,17 +14,11 @@ from xknx.telegram.address import IndividualAddress, parse_device_group_address
from homeassistant.components.number import (
DEVICE_CLASS_UNITS as NUMBER_DEVICE_CLASS_UNITS,
)
from homeassistant.components.sensor import (
CONF_STATE_CLASS as CONF_SENSOR_STATE_CLASS,
DEVICE_CLASS_STATE_CLASSES,
DEVICE_CLASS_UNITS,
STATE_CLASS_UNITS,
)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_UNIT_OF_MEASUREMENT
from homeassistant.helpers import config_validation as cv

from .const import NumberConf
from .dpt import DPTInfo, get_supported_dpts
from .dpt import get_supported_dpts


def dpt_subclass_validator(dpt_base_class: type[DPTBase]) -> Callable[[Any], str | int]:
@@ -225,65 +219,3 @@ def validate_number_attributes(
)

return config


def validate_sensor_attributes(
dpt_info: DPTInfo, config: dict[str, Any]
) -> dict[str, Any]:
"""Validate that state_class is compatible with device_class and unit_of_measurement.

Works for both, UI and YAML configuration schema since they
share same names for all tested attributes.
"""
state_class = config.get(
CONF_SENSOR_STATE_CLASS,
dpt_info["sensor_state_class"],
)
device_class = config.get(
CONF_DEVICE_CLASS,
dpt_info["sensor_device_class"],
)
unit_of_measurement = config.get(
CONF_UNIT_OF_MEASUREMENT,
dpt_info["unit"],
)
if (
state_class
and device_class
and (state_classes := DEVICE_CLASS_STATE_CLASSES.get(device_class)) is not None
and state_class not in state_classes
):
raise vol.Invalid(
f"State class '{state_class}' is not valid for device class '{device_class}'. "
f"Valid options are: {', '.join(sorted(map(str, state_classes), key=str.casefold))}",
path=[CONF_SENSOR_STATE_CLASS],
)
if (
device_class
and (d_c_units := DEVICE_CLASS_UNITS.get(device_class)) is not None
and unit_of_measurement not in d_c_units
):
raise vol.Invalid(
f"Unit of measurement '{unit_of_measurement}' is not valid for device class '{device_class}'. "
f"Valid options are: {', '.join(sorted(map(str, d_c_units), key=str.casefold))}",
path=(
[CONF_DEVICE_CLASS]
if CONF_DEVICE_CLASS in config
else [CONF_UNIT_OF_MEASUREMENT]
),
)
if (
state_class
and (s_c_units := STATE_CLASS_UNITS.get(state_class)) is not None
and unit_of_measurement not in s_c_units
):
raise vol.Invalid(
f"Unit of measurement '{unit_of_measurement}' is not valid for state class '{state_class}'. "
f"Valid options are: {', '.join(sorted(map(str, s_c_units), key=str.casefold))}",
path=(
[CONF_SENSOR_STATE_CLASS]
if CONF_SENSOR_STATE_CLASS in config
else [CONF_UNIT_OF_MEASUREMENT]
),
)
return config

@@ -85,13 +85,12 @@ class KNXWeather(KnxYamlEntity, WeatherEntity):

def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of a KNX sensor."""
self._device = _create_weather(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device._temperature.group_address_state), # noqa: SLF001
name=config[CONF_NAME],
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=_create_weather(knx_module.xknx, config),
)
self._attr_unique_id = str(self._device._temperature.group_address_state) # noqa: SLF001
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)

@property
def native_temperature(self) -> float | None:

@@ -7,7 +7,6 @@ from typing import Any

from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers.frame import report_usage

from .const import LABS_DATA
from .models import EventLabsUpdatedData
@@ -80,8 +79,6 @@ def async_listen(
) -> Callable[[], None]:
"""Listen for changes to a specific preview feature.

Deprecated: use async_subscribe_preview_feature instead.

Args:
hass: HomeAssistant instance
domain: Integration domain
@@ -91,11 +88,6 @@ def async_listen(
Returns:
Callable to unsubscribe from the listener
"""
report_usage(
"calls `async_listen` which is deprecated, "
"use `async_subscribe_preview_feature` instead",
breaks_in_ha_version="2027.3.0",
)

async def _listener(_event_data: EventLabsUpdatedData) -> None:
listener()

@@ -10,5 +10,5 @@
"iot_class": "local_polling",
"loggers": ["pypck"],
"quality_scale": "silver",
"requirements": ["pypck==0.9.11", "lcn-frontend==0.2.7"]
"requirements": ["pypck==0.9.10", "lcn-frontend==0.2.7"]
}

@@ -15,7 +15,11 @@ from pyliebherrhomeapi import (

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryError,
ConfigEntryNotReady,
)
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN
@@ -53,7 +57,7 @@ class LiebherrCoordinator(DataUpdateCoordinator[DeviceState]):
try:
await self.client.get_device(self.device_id)
except LiebherrAuthenticationError as err:
raise ConfigEntryAuthFailed("Invalid API key") from err
raise ConfigEntryError("Invalid API key") from err
except LiebherrConnectionError as err:
raise ConfigEntryNotReady(
f"Failed to connect to device {self.device_id}: {err}"

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/local_calendar",
"iot_class": "local_polling",
"loggers": ["ical"],
"requirements": ["ical==13.2.0"]
"requirements": ["ical==12.1.3"]
}

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/local_todo",
"iot_class": "local_polling",
"requirements": ["ical==13.2.0"]
"requirements": ["ical==12.1.3"]
}

@@ -9,7 +9,6 @@ from mastodon.Mastodon import (
Mastodon,
MastodonError,
MastodonNotFoundError,
MastodonUnauthorizedError,
)

from homeassistant.const import (
@@ -19,7 +18,7 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import slugify
@@ -49,11 +48,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: MastodonConfigEntry) ->
entry,
)

except MastodonUnauthorizedError as error:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="auth_failed",
) from error
except MastodonError as ex:
raise ConfigEntryNotReady("Failed to connect") from ex


@@ -2,7 +2,6 @@

from __future__ import annotations

from collections.abc import Mapping
from typing import Any

from mastodon.Mastodon import (
@@ -44,28 +43,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
): TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)),
}
)
REAUTH_SCHEMA = vol.Schema(
{
vol.Required(
CONF_ACCESS_TOKEN,
): TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)),
}
)
STEP_RECONFIGURE_SCHEMA = vol.Schema(
{
vol.Required(
CONF_CLIENT_ID,
): TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)),
vol.Required(
CONF_CLIENT_SECRET,
): TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)),
vol.Required(
CONF_ACCESS_TOKEN,
): TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)),
}
)

EXAMPLE_URL = "https://mastodon.social"


def base_url_from_url(url: str) -> str:
@@ -73,26 +50,18 @@ def base_url_from_url(url: str) -> str:
return str(URL(url).origin())


def remove_email_link(account_name: str) -> str:
"""Remove email link from account name."""

# Replaces the @ with a HTML entity to prevent mailto links.
return account_name.replace("@", "&#64;")


class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""

VERSION = 1
MINOR_VERSION = 2

base_url: str
client_id: str
client_secret: str
access_token: str

def check_connection(
self,
base_url: str,
client_id: str,
client_secret: str,
access_token: str,
) -> tuple[
InstanceV2 | Instance | None,
Account | None,
@@ -101,10 +70,10 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
"""Check connection to the Mastodon instance."""
try:
client = create_mastodon_client(
self.base_url,
self.client_id,
self.client_secret,
self.access_token,
base_url,
client_id,
client_secret,
access_token,
)
try:
instance = client.instance_v2()
@@ -148,13 +117,12 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
if user_input:
user_input[CONF_BASE_URL] = base_url_from_url(user_input[CONF_BASE_URL])

self.base_url = user_input[CONF_BASE_URL]
self.client_id = user_input[CONF_CLIENT_ID]
self.client_secret = user_input[CONF_CLIENT_SECRET]
self.access_token = user_input[CONF_ACCESS_TOKEN]

instance, account, errors = await self.hass.async_add_executor_job(
self.check_connection
self.check_connection,
user_input[CONF_BASE_URL],
user_input[CONF_CLIENT_ID],
user_input[CONF_CLIENT_SECRET],
user_input[CONF_ACCESS_TOKEN],
)

if not errors:
@@ -169,81 +137,5 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
return self.show_user_form(
user_input,
errors,
description_placeholders={"example_url": EXAMPLE_URL},
)

async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth upon an API authentication error."""
self.base_url = entry_data[CONF_BASE_URL]
self.client_id = entry_data[CONF_CLIENT_ID]
self.client_secret = entry_data[CONF_CLIENT_SECRET]
self.access_token = entry_data[CONF_ACCESS_TOKEN]
return await self.async_step_reauth_confirm()

async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauth dialog."""
errors: dict[str, str] = {}
if user_input:
self.access_token = user_input[CONF_ACCESS_TOKEN]
instance, account, errors = await self.hass.async_add_executor_job(
self.check_connection
)
if not errors:
name = construct_mastodon_username(instance, account)
await self.async_set_unique_id(slugify(name))
self._abort_if_unique_id_mismatch(reason="wrong_account")
return self.async_update_reload_and_abort(
self._get_reauth_entry(),
data_updates={CONF_ACCESS_TOKEN: user_input[CONF_ACCESS_TOKEN]},
)
account_name = self._get_reauth_entry().title
return self.async_show_form(
step_id="reauth_confirm",
data_schema=REAUTH_SCHEMA,
errors=errors,
description_placeholders={
"account_name": remove_email_link(account_name),
},
)

async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration of the integration."""
errors: dict[str, str] = {}

reconfigure_entry = self._get_reconfigure_entry()

if user_input:
self.base_url = reconfigure_entry.data[CONF_BASE_URL]
self.client_id = user_input[CONF_CLIENT_ID]
self.client_secret = user_input[CONF_CLIENT_SECRET]
self.access_token = user_input[CONF_ACCESS_TOKEN]
instance, account, errors = await self.hass.async_add_executor_job(
self.check_connection
)
if not errors:
name = construct_mastodon_username(instance, account)
await self.async_set_unique_id(slugify(name))
self._abort_if_unique_id_mismatch(reason="wrong_account")
return self.async_update_reload_and_abort(
reconfigure_entry,
data_updates={
CONF_CLIENT_ID: user_input[CONF_CLIENT_ID],
CONF_CLIENT_SECRET: user_input[CONF_CLIENT_SECRET],
CONF_ACCESS_TOKEN: user_input[CONF_ACCESS_TOKEN],
},
)
account_name = reconfigure_entry.title
return self.async_show_form(
step_id="reconfigure",
data_schema=STEP_RECONFIGURE_SCHEMA,
errors=errors,
description_placeholders={
"account_name": remove_email_link(account_name),
},
description_placeholders={"example_url": "https://mastodon.social"},
)

@@ -12,7 +12,6 @@ DATA_HASS_CONFIG = "mastodon_hass_config"
DEFAULT_URL: Final = "https://mastodon.social"
DEFAULT_NAME: Final = "Mastodon"

ATTR_ACCOUNT_NAME = "account_name"
ATTR_STATUS = "status"
ATTR_VISIBILITY = "visibility"
ATTR_IDEMPOTENCY_KEY = "idempotency_key"

@@ -32,9 +32,6 @@
}
},
"services": {
"get_account": {
"service": "mdi:account-search"
},
"post": {
"service": "mdi:message-text"
}

@@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["mastodon"],
"quality_scale": "silver",
"quality_scale": "bronze",
"requirements": ["Mastodon.py==2.1.2"]
}

@@ -34,7 +34,10 @@ rules:
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
reauthentication-flow:
status: todo
comment: |
Waiting to move to oAuth.
test-coverage: done
# Gold
devices: done
@@ -64,7 +67,10 @@ rules:
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: done
reconfiguration-flow:
status: todo
comment: |
Waiting to move to OAuth.
repair-issues: done
stale-devices:
status: exempt

@@ -5,22 +5,15 @@ from functools import partial
from typing import Any

from mastodon import Mastodon
from mastodon.Mastodon import Account, MastodonAPIError, MediaAttachment
from mastodon.Mastodon import MastodonAPIError, MediaAttachment
import voluptuous as vol

from homeassistant.const import ATTR_CONFIG_ENTRY_ID
from homeassistant.core import (
HomeAssistant,
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.core import HomeAssistant, ServiceCall, ServiceResponse, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import service

from .const import (
ATTR_ACCOUNT_NAME,
ATTR_CONTENT_WARNING,
ATTR_IDEMPOTENCY_KEY,
ATTR_LANGUAGE,
@@ -44,13 +37,6 @@ class StatusVisibility(StrEnum):
DIRECT = "direct"


SERVICE_GET_ACCOUNT = "get_account"
SERVICE_GET_ACCOUNT_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY_ID): str,
vol.Required(ATTR_ACCOUNT_NAME): str,
}
)
SERVICE_POST = "post"
SERVICE_POST_SCHEMA = vol.Schema(
{
@@ -70,127 +56,95 @@ SERVICE_POST_SCHEMA = vol.Schema(
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up the services for the Mastodon integration."""
hass.services.async_register(
DOMAIN,
SERVICE_GET_ACCOUNT,
_async_get_account,
schema=SERVICE_GET_ACCOUNT_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN, SERVICE_POST, _async_post, schema=SERVICE_POST_SCHEMA
)


async def _async_get_account(call: ServiceCall) -> ServiceResponse:
"""Get account information."""
entry: MastodonConfigEntry = service.async_get_config_entry(
call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY_ID]
)
client = entry.runtime_data.client

account_name: str = call.data[ATTR_ACCOUNT_NAME]

try:
account: Account = await call.hass.async_add_executor_job(
partial(client.account_lookup, acct=account_name)
async def async_post(call: ServiceCall) -> ServiceResponse:
"""Post a status."""
entry: MastodonConfigEntry = service.async_get_config_entry(
hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY_ID]
)
except MastodonAPIError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="unable_to_get_account",
translation_placeholders={"account_name": account_name},
) from err
client = entry.runtime_data.client

return {"account": account}
status: str = call.data[ATTR_STATUS]


async def _async_post(call: ServiceCall) -> ServiceResponse:
"""Post a status."""
entry: MastodonConfigEntry = service.async_get_config_entry(
call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY_ID]
)
client = entry.runtime_data.client

status: str = call.data[ATTR_STATUS]

visibility: str | None = (
StatusVisibility(call.data[ATTR_VISIBILITY])
if ATTR_VISIBILITY in call.data
else None
)
idempotency_key: str | None = call.data.get(ATTR_IDEMPOTENCY_KEY)
spoiler_text: str | None = call.data.get(ATTR_CONTENT_WARNING)
language: str | None = call.data.get(ATTR_LANGUAGE)
media_path: str | None = call.data.get(ATTR_MEDIA)
media_description: str | None = call.data.get(ATTR_MEDIA_DESCRIPTION)
media_warning: str | None = call.data.get(ATTR_MEDIA_WARNING)

if idempotency_key and len(idempotency_key) < 4:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="idempotency_key_too_short",
visibility: str | None = (
StatusVisibility(call.data[ATTR_VISIBILITY])
if ATTR_VISIBILITY in call.data
else None
)
idempotency_key: str | None = call.data.get(ATTR_IDEMPOTENCY_KEY)
spoiler_text: str | None = call.data.get(ATTR_CONTENT_WARNING)
language: str | None = call.data.get(ATTR_LANGUAGE)
media_path: str | None = call.data.get(ATTR_MEDIA)
media_description: str | None = call.data.get(ATTR_MEDIA_DESCRIPTION)
media_warning: str | None = call.data.get(ATTR_MEDIA_WARNING)

await call.hass.async_add_executor_job(
partial(
_post,
hass=call.hass,
client=client,
status=status,
visibility=visibility,
idempotency_key=idempotency_key,
spoiler_text=spoiler_text,
language=language,
media_path=media_path,
media_description=media_description,
sensitive=media_warning,
)
)

return None


def _post(hass: HomeAssistant, client: Mastodon, **kwargs: Any) -> None:
"""Post to Mastodon."""

media_data: MediaAttachment | None = None

media_path = kwargs.get("media_path")
if media_path:
if not hass.config.is_allowed_path(media_path):
raise HomeAssistantError(
if idempotency_key and len(idempotency_key) < 4:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="not_whitelisted_directory",
translation_placeholders={"media": media_path},
translation_key="idempotency_key_too_short",
)

media_type = get_media_type(media_path)
media_description = kwargs.get("media_description")
await hass.async_add_executor_job(
partial(
_post,
client=client,
status=status,
visibility=visibility,
idempotency_key=idempotency_key,
spoiler_text=spoiler_text,
language=language,
media_path=media_path,
media_description=media_description,
sensitive=media_warning,
)
)

return None

def _post(client: Mastodon, **kwargs: Any) -> None:
"""Post to Mastodon."""

media_data: MediaAttachment | None = None

media_path = kwargs.get("media_path")
if media_path:
if not hass.config.is_allowed_path(media_path):
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="not_whitelisted_directory",
translation_placeholders={"media": media_path},
)

media_type = get_media_type(media_path)
media_description = kwargs.get("media_description")
try:
media_data = client.media_post(
media_file=media_path,
mime_type=media_type,
description=media_description,
)

except MastodonAPIError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="unable_to_upload_image",
translation_placeholders={"media_path": media_path},
) from err

kwargs.pop("media_path", None)
kwargs.pop("media_description", None)

try:
media_data = client.media_post(
media_file=media_path,
mime_type=media_type,
description=media_description,
)

media_ids: str | None = None
if media_data:
media_ids = media_data.id
client.status_post(media_ids=media_ids, **kwargs)
except MastodonAPIError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="unable_to_upload_image",
translation_placeholders={"media_path": media_path},
translation_key="unable_to_send_message",
) from err

kwargs.pop("media_path", None)
kwargs.pop("media_description", None)

media_ids: str | None = None
if media_data:
media_ids = media_data.id
try:
client.status_post(media_ids=media_ids, **kwargs)
except MastodonAPIError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="unable_to_send_message",
) from err
hass.services.async_register(
DOMAIN, SERVICE_POST, async_post, schema=SERVICE_POST_SCHEMA
)

@@ -1,14 +1,3 @@
|
||||
get_account:
|
||||
fields:
|
||||
config_entry_id:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: mastodon
|
||||
account_name:
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
post:
|
||||
fields:
|
||||
config_entry_id:
|
||||
|
||||

@@ -1,11 +1,7 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
      "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
      "wrong_account": "You have to use the same account that was used to configure the integration."
    },
    "error": {
      "network_error": "The Mastodon instance was not found.",
@@ -13,28 +9,6 @@
      "unknown": "Unknown error occurred when connecting to the Mastodon instance."
    },
    "step": {
      "reauth_confirm": {
        "data": {
          "access_token": "[%key:common::config_flow::data::access_token%]"
        },
        "data_description": {
          "access_token": "[%key:component::mastodon::config::step::user::data_description::access_token%]"
        },
        "description": "Please reauthenticate {account_name} with Mastodon."
      },
      "reconfigure": {
        "data": {
          "access_token": "[%key:common::config_flow::data::access_token%]",
          "client_id": "[%key:component::mastodon::config::step::user::data::client_id%]",
          "client_secret": "[%key:component::mastodon::config::step::user::data::client_secret%]"
        },
        "data_description": {
          "access_token": "[%key:component::mastodon::config::step::user::data_description::access_token%]",
          "client_id": "[%key:component::mastodon::config::step::user::data_description::client_id%]",
          "client_secret": "[%key:component::mastodon::config::step::user::data_description::client_secret%]"
        },
        "description": "Reconfigure {account_name} with Mastodon."
      },
      "user": {
        "data": {
          "access_token": "[%key:common::config_flow::data::access_token%]",
@@ -95,18 +69,12 @@
    }
  },
  "exceptions": {
    "auth_failed": {
      "message": "Authentication failed, please reauthenticate with Mastodon."
    },
    "idempotency_key_too_short": {
      "message": "Idempotency key must be at least 4 characters long."
    },
    "not_whitelisted_directory": {
      "message": "{media} is not a whitelisted directory."
    },
    "unable_to_get_account": {
      "message": "Unable to get account \"{account_name}\"."
    },
    "unable_to_send_message": {
      "message": "Unable to send message."
    },
@@ -125,20 +93,6 @@
    }
  },
  "services": {
    "get_account": {
      "description": "Gets information about a Mastodon account.",
      "fields": {
        "account_name": {
          "description": "The Mastodon account username (e.g. @user@instance).",
          "name": "Account name"
        },
        "config_entry_id": {
          "description": "Select the Mastodon instance to use to search.",
          "name": "Mastodon instance"
        }
      },
      "name": "Get account"
    },
    "post": {
      "description": "Posts a status on your Mastodon account.",
      "fields": {
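
Since get_account returns account data rather than changing state, a caller has to request the response explicitly. A hedged sketch of such a call; the entry ID is a placeholder, and the account name format follows the field description above:

async def call_get_account_example(hass) -> None:
    # return_response=True is required when a service returns data.
    response = await hass.services.async_call(
        "mastodon",
        "get_account",
        {
            "config_entry_id": "0123456789abcdef",  # placeholder entry ID
            "account_name": "@user@instance",  # format per the description above
        },
        blocking=True,
        return_response=True,
    )
    print(response)  # account details as returned by the integration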

@@ -124,13 +124,8 @@ class MatterEntity(Entity):
            and ep.has_attribute(None, entity_info.primary_attribute)
        ):
            self._name_postfix = str(self._endpoint.endpoint_id)
        # Always set translation_key for state_attributes translations.
        # For primary entities (no postfix), suppress the translated name,
        # so only the device name is used.
        if self._platform_translation_key and not self.translation_key:
            self._attr_translation_key = self._platform_translation_key
            if not self._name_postfix:
                self._attr_name = None

        # Matter labels can be used to modify the entity name
        # by appending the text.
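
For background on the naming rule described in the comments above: with _attr_has_entity_name enabled, setting _attr_name to None makes the entity take its device's name, while a translation_key still resolves translated state attributes. A minimal sketch with a toy entity class (illustrative only, not part of this diff):

from homeassistant.components.sensor import SensorEntity


class ExampleSensor(SensorEntity):
    """Toy sensor illustrating the name-suppression rule."""

    _attr_has_entity_name = True
    _attr_translation_key = "example"  # keeps state_attributes translations

    def __init__(self, name_postfix: str | None) -> None:
        # Primary entity (no postfix): suppress the translated name so the
        # entity is displayed under the device name alone.
        if name_postfix is None:
            self._attr_name = None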

@@ -722,8 +722,8 @@ DISCOVERY_SCHEMAS = [
        platform=Platform.SENSOR,
        entity_description=MatterSensorEntityDescription(
            key="NitrogenDioxideSensor",
            translation_key="nitrogen_dioxide",
            native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
            device_class=SensorDeviceClass.NITROGEN_DIOXIDE,
            state_class=SensorStateClass.MEASUREMENT,
        ),
        entity_class=MatterSensor,

@@ -33,9 +33,6 @@
    "get_recipes": {
      "service": "mdi:book-open-page-variant"
    },
    "get_shopping_list_items": {
      "service": "mdi:basket"
    },
    "import_recipe": {
      "service": "mdi:map-search"
    },

@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "local_polling",
  "quality_scale": "platinum",
  "requirements": ["aiomealie==1.2.1"]
}

@@ -12,7 +12,6 @@ from aiomealie import (
from awesomeversion import AwesomeVersion
import voluptuous as vol

from homeassistant.components.todo import DOMAIN as TODO_DOMAIN
from homeassistant.const import ATTR_CONFIG_ENTRY_ID, ATTR_DATE
from homeassistant.core import (
    HomeAssistant,
@@ -65,8 +64,6 @@ SERVICE_GET_RECIPES_SCHEMA = vol.Schema(
    }
)

SERVICE_GET_SHOPPING_LIST_ITEMS = "get_shopping_list_items"

SERVICE_IMPORT_RECIPE = "import_recipe"
SERVICE_IMPORT_RECIPE_SCHEMA = vol.Schema(
    {
@@ -324,12 +321,3 @@ def async_setup_services(hass: HomeAssistant) -> None:
        schema=SERVICE_SET_MEALPLAN_SCHEMA,
        supports_response=SupportsResponse.OPTIONAL,
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_GET_SHOPPING_LIST_ITEMS,
        entity_domain=TODO_DOMAIN,
        schema=None,
        func="async_get_shopping_list_items",
        supports_response=SupportsResponse.ONLY,
    )

@@ -45,12 +45,6 @@ get_recipes:
          mode: box
          unit_of_measurement: recipes

get_shopping_list_items:
  target:
    entity:
      integration: mealie
      domain: todo

import_recipe:
  fields:
    config_entry_id:

@@ -147,9 +147,6 @@
    "setup_failed": {
      "message": "Could not connect to the Mealie instance."
    },
    "shopping_list_not_found": {
      "message": "Shopping list with name or ID `{shopping_list}` not found."
    },
    "update_failed_mealplan": {
      "message": "Could not fetch mealplan data."
    },
@@ -230,10 +227,6 @@
      },
      "name": "Get recipes"
    },
    "get_shopping_list_items": {
      "description": "Gets items from a shopping list in Mealie",
      "name": "Get shopping list items"
    },
    "import_recipe": {
      "description": "Imports a recipe from a URL",
      "fields": {

@@ -2,15 +2,7 @@

from __future__ import annotations

from dataclasses import asdict

from aiomealie import (
    MealieConnectionError,
    MealieError,
    MutateShoppingItem,
    ShoppingItem,
    ShoppingList,
)

from homeassistant.components.todo import (
    DOMAIN as TODO_DOMAIN,
@@ -19,7 +11,7 @@ from homeassistant.components.todo import (
    TodoListEntity,
    TodoListEntityFeature,
)
from homeassistant.core import HomeAssistant, ServiceResponse
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -273,18 +265,3 @@ class MealieShoppingListTodoListEntity(MealieEntity, TodoListEntity):
    def available(self) -> bool:
        """Return False if shopping list no longer available."""
        return super().available and self._shopping_list_id in self.coordinator.data

    async def async_get_shopping_list_items(self) -> ServiceResponse:
        """Get structured shopping list items."""
        client = self.coordinator.client
        try:
            shopping_items = await client.get_shopping_items(self._shopping_list_id)
        except MealieConnectionError as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="connection_error",
            ) from err
        return {
            "name": self.shopping_list.name,
            "items": [asdict(item) for item in shopping_items.items],
        }
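
Taken together with the async_register_platform_entity_service call in services.py above, this method backs an entity service that targets Mealie todo entities and only returns a response. A hedged sketch of a call; the entity ID is a placeholder:

async def call_get_items_example(hass) -> None:
    # supports_response=SupportsResponse.ONLY means the caller must request
    # the response; results are assumed to be keyed by the targeted entity ID.
    response = await hass.services.async_call(
        "mealie",
        "get_shopping_list_items",
        {"entity_id": "todo.mealie_shopping_list"},  # placeholder entity
        blocking=True,
        return_response=True,
    )
    # Each entry mirrors the {"name": ..., "items": [...]} payload built in
    # async_get_shopping_list_items above.
    print(response)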