Mirror of https://github.com/home-assistant/core.git (synced 2025-12-18 13:58:01 +00:00)

Compare commits (1 commit): schedule/a ... labs_helpe
Commit SHA1: 35a6cce431
@@ -13,7 +13,6 @@ core: &core
# Our base platforms, that are used by other integrations
base_platforms: &base_platforms
- homeassistant/components/ai_task/**
- homeassistant/components/air_quality/**
- homeassistant/components/alarm_control_panel/**
- homeassistant/components/assist_satellite/**
@@ -27,6 +27,7 @@
"charliermarsh.ruff",
"ms-python.pylint",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"redhat.vscode-yaml",
"esbenp.prettier-vscode",
"GitHub.vscode-pull-request-github",
.github/workflows/builder.yml (vendored, 186 changed lines)
@@ -14,9 +14,7 @@ env:
PIP_TIMEOUT: 60
UV_HTTP_TIMEOUT: 60
UV_SYSTEM_PYTHON: "true"
# Base image version from https://github.com/home-assistant/docker
BASE_IMAGE_VERSION: "2025.12.0"
ARCHITECTURES: '["amd64", "aarch64"]'
BASE_IMAGE_VERSION: "2025.11.0"
jobs:
init:

@@ -27,10 +25,9 @@ jobs:
version: ${{ steps.version.outputs.version }}
channel: ${{ steps.version.outputs.channel }}
publish: ${{ steps.version.outputs.publish }}
architectures: ${{ env.ARCHITECTURES }}
steps:
- name: Checkout the repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0

@@ -70,7 +67,7 @@ jobs:
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
- name: Upload translations
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: translations
path: translations.tar.gz

@@ -88,7 +85,7 @@ jobs:
strategy:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
arch: ["amd64", "aarch64"]
include:
- arch: amd64
os: ubuntu-latest

@@ -96,7 +93,7 @@ jobs:
os: ubuntu-24.04-arm
steps:
- name: Checkout the repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'

@@ -169,7 +166,7 @@ jobs:
fi
- name: Download translations
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
name: translations

@@ -190,8 +187,7 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- &install_cosign
name: Install Cosign
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.5.3"

@@ -273,7 +269,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Set build additional args
run: |

@@ -295,7 +291,7 @@ jobs:
# home-assistant/builder doesn't support sha pinning
- name: Build base image
uses: home-assistant/builder@2025.11.0
uses: home-assistant/builder@2025.09.0
with:
args: |
$BUILD_ARGS \

@@ -311,7 +307,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master

@@ -354,7 +350,13 @@ jobs:
matrix:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- *install_cosign
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.2.3"
- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'

@@ -364,104 +366,88 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: matrix.registry == 'ghcr.io/home-assistant'
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Verify architecture image signatures
- name: Build Meta Image
shell: bash
run: |
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
for arch in $ARCHS; do
echo "Verifying ${arch} image signature..."
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp https://github.com/home-assistant/core/.* \
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
done
echo "✓ All images verified successfully"
export DOCKER_CLI_EXPERIMENTAL=enabled
# Generate all Docker tags based on version string
# Version format: YYYY.MM.PATCH, YYYY.MM.PATCHbN (beta), or YYYY.MM.PATCH.devYYYYMMDDHHMM (dev)
# Examples:
# 2025.12.1 (stable) -> tags: 2025.12.1, 2025.12, stable, latest, beta, rc
# 2025.12.0b3 (beta) -> tags: 2025.12.0b3, beta, rc
# 2025.12.0.dev202511250240 -> tags: 2025.12.0.dev202511250240, dev
- name: Generate Docker metadata
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
with:
images: ${{ matrix.registry }}/home-assistant
sep-tags: ","
tags: |
type=raw,value=${{ needs.init.outputs.version }},priority=9999
type=raw,value=dev,enable=${{ contains(needs.init.outputs.version, 'd') }}
type=raw,value=beta,enable=${{ !contains(needs.init.outputs.version, 'd') }}
type=raw,value=rc,enable=${{ !contains(needs.init.outputs.version, 'd') }}
type=raw,value=stable,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
type=raw,value=latest,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
function create_manifest() {
local tag_l=${1}
local tag_r=${2}
local registry=${{ matrix.registry }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.7.1
docker manifest create "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
"${registry}/aarch64-homeassistant:${tag_r}"
- name: Copy architecture images to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
shell: bash
run: |
# Use imagetools to copy image blobs directly between registries
# This preserves provenance/attestations and seems to be much faster than pull/push
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
for arch in $ARCHS; do
echo "Copying ${arch} image to DockerHub..."
for attempt in 1 2 3; do
if docker buildx imagetools create \
--tag "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}" \
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"; then
break
fi
echo "Attempt ${attempt} failed, retrying in 10 seconds..."
sleep 10
if [ "${attempt}" -eq 3 ]; then
echo "Failed after 3 attempts"
exit 1
fi
done
cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
done
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
--os linux --arch amd64
- name: Create and push multi-arch manifests
shell: bash
run: |
# Build list of architecture images dynamically
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
ARCH_IMAGES=()
for arch in $ARCHS; do
ARCH_IMAGES+=("${{ matrix.registry }}/${arch}-homeassistant:${{ needs.init.outputs.version }}")
done
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/aarch64-homeassistant:${tag_r}" \
--os linux --arch arm64 --variant=v8
# Build list of all tags for single manifest creation
# Note: Using sep-tags=',' in metadata-action for easier parsing
TAG_ARGS=()
IFS=',' read -ra TAGS <<< "${{ steps.meta.outputs.tags }}"
for tag in "${TAGS[@]}"; do
TAG_ARGS+=("--tag" "${tag}")
done
docker manifest push --purge "${registry}/home-assistant:${tag_l}"
cosign sign --yes "${registry}/home-assistant:${tag_l}"
}
# Create manifest with ALL tags in a single operation (much faster!)
echo "Creating multi-arch manifest with tags: ${TAGS[*]}"
docker buildx imagetools create "${TAG_ARGS[@]}" "${ARCH_IMAGES[@]}"
function validate_image() {
local image=${1}
if ! cosign verify --certificate-oidc-issuer https://token.actions.githubusercontent.com --certificate-identity-regexp https://github.com/home-assistant/core/.* "${image}"; then
echo "Invalid signature!"
exit 1
fi
}
# Sign each tag separately (signing requires individual tag names)
echo "Signing all tags..."
for tag in "${TAGS[@]}"; do
echo "Signing ${tag}"
cosign sign --yes "${tag}"
done
function push_dockerhub() {
local image=${1}
local tag=${2}
echo "All manifests created and signed successfully"
docker tag "ghcr.io/home-assistant/${image}:${tag}" "docker.io/homeassistant/${image}:${tag}"
docker push "docker.io/homeassistant/${image}:${tag}"
cosign sign --yes "docker.io/homeassistant/${image}:${tag}"
}
# Pull images from github container registry and verify signature
docker pull "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
docker pull "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
if [[ "${{ matrix.registry }}" == "docker.io/homeassistant" ]]; then
# Upload images to dockerhub
push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
fi
# Create version tag
create_manifest "${{ needs.init.outputs.version }}" "${{ needs.init.outputs.version }}"
# Create general tags
if [[ "${{ needs.init.outputs.version }}" =~ d ]]; then
create_manifest "dev" "${{ needs.init.outputs.version }}"
elif [[ "${{ needs.init.outputs.version }}" =~ b ]]; then
create_manifest "beta" "${{ needs.init.outputs.version }}"
create_manifest "rc" "${{ needs.init.outputs.version }}"
else
create_manifest "stable" "${{ needs.init.outputs.version }}"
create_manifest "latest" "${{ needs.init.outputs.version }}"
create_manifest "beta" "${{ needs.init.outputs.version }}"
create_manifest "rc" "${{ needs.init.outputs.version }}"
# Create series version tag (e.g. 2021.6)
v="${{ needs.init.outputs.version }}"
create_manifest "${v%.*}" "${{ needs.init.outputs.version }}"
fi
build_python:
name: Build PyPi package

@@ -474,7 +460,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0

@@ -482,7 +468,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download translations
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
name: translations

@@ -519,7 +505,7 @@ jobs:
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
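The comments and metadata-action rules in the builder workflow above describe how a Home Assistant version string expands into a set of Docker tags (dev, beta/rc, stable/latest plus the YYYY.MM series tag). The sketch below condenses that mapping into a small Python helper; it is illustrative only, based on the workflow comments, and is not code from the repository:

```python
def docker_tags(version: str) -> list[str]:
    """Derive the Docker tag set for a Home Assistant version string.

    Version formats (from the workflow comments):
      2025.12.1                  -> stable release
      2025.12.0b3                -> beta release
      2025.12.0.dev202511250240  -> dev build
    """
    tags = [version]  # the full version string is always tagged
    if "d" in version:  # dev builds contain ".dev<timestamp>"
        tags.append("dev")
    elif "b" in version:  # beta builds contain "b<N>"
        tags += ["beta", "rc"]
    else:  # stable releases
        series = version.rsplit(".", 1)[0]  # "2025.12.1" -> "2025.12"
        tags += [series, "stable", "latest", "beta", "rc"]
    return tags


assert docker_tags("2025.12.0b3") == ["2025.12.0b3", "beta", "rc"]
assert docker_tags("2025.12.0.dev202511250240") == ["2025.12.0.dev202511250240", "dev"]
assert docker_tags("2025.12.1") == ["2025.12.1", "2025.12", "stable", "latest", "beta", "rc"]
```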
.github/workflows/ci.yaml (vendored, 22 changed lines)
@@ -40,9 +40,9 @@ env:
CACHE_VERSION: 2
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2026.1"
DEFAULT_PYTHON: "3.13.11"
ALL_PYTHON_VERSIONS: "['3.13.11', '3.14.2']"
HA_SHORT_VERSION: "2025.12"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13', '3.14']"
# 10.3 is the oldest supported version
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
# 10.6 is the current long-term-support

@@ -99,7 +99,7 @@ jobs:
steps:
- &checkout
name: Check out code from GitHub
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: |

@@ -263,7 +263,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: &actions-cache actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
uses: &actions-cache actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
key: &key-pre-commit-venv >-

@@ -304,7 +304,7 @@ jobs:
- &cache-restore-pre-commit-venv
name: Restore base Python virtual environment
id: cache-venv
uses: &actions-cache-restore actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
uses: &actions-cache-restore actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true

@@ -511,7 +511,7 @@ jobs:
fi
- name: Save apt cache
if: steps.cache-apt-check.outputs.cache-hit != 'true'
uses: &actions-cache-save actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
uses: &actions-cache-save actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: *path-apt-cache
key: *key-apt-cache

@@ -534,7 +534,7 @@ jobs:
python --version
uv pip freeze >> pip_freeze.txt
- name: Upload pip_freeze artifact
uses: &actions-upload-artifact actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: &actions-upload-artifact actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: pip-freeze-${{ matrix.python-version }}
path: pip_freeze.txt

@@ -864,7 +864,7 @@ jobs:
run: |
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
- name: Download pytest_buckets
uses: &actions-download-artifact actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: &actions-download-artifact actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
name: pytest_buckets
- &compile-english-translations

@@ -1188,7 +1188,7 @@ jobs:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'true'
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
with:
fail_ci_if_error: true
flags: full-suite

@@ -1313,7 +1313,7 @@ jobs:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'false'
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
with:
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/codeql.yml (vendored, 6 changed lines)
@@ -21,14 +21,14 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
uses: github/codeql-action/init@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
uses: github/codeql-action/analyze@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
with:
category: "/language:python"

@@ -231,7 +231,7 @@ jobs:
- name: Detect duplicates using AI
id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
with:
model: openai/gpt-4o
system-prompt: |

@@ -57,7 +57,7 @@ jobs:
- name: Detect language using AI
id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
with:
model: openai/gpt-4o-mini
system-prompt: |
.github/workflows/lock.yml (vendored, 2 changed lines)
@@ -10,7 +10,7 @@ jobs:
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@7266a7ce5c1df01b1c6db85bf8cd86c737dadbe7 # v6.0.0
- uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 # v5.0.1
with:
github-token: ${{ github.token }}
issue-inactive-days: "30"
.github/workflows/stale.yml (vendored, 6 changed lines)
@@ -17,7 +17,7 @@ jobs:
# - No PRs marked as no-stale
# - No issues (-1)
- name: 60 days stale PRs policy
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 60

@@ -57,7 +57,7 @@ jobs:
# - No issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: 90 days stale issues
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
with:
repo-token: ${{ steps.token.outputs.token }}
days-before-stale: 90

@@ -87,7 +87,7 @@ jobs:
# - No Issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: Needs more information stale issues policy
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
with:
repo-token: ${{ steps.token.outputs.token }}
only-labels: "needs-more-information"
.github/workflows/translations.yml (vendored, 2 changed lines)
@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
.github/workflows/wheels.yml (vendored, 8 changed lines)
@@ -31,7 +31,7 @@ jobs:
steps:
- &checkout
name: Checkout the repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python

@@ -74,7 +74,7 @@ jobs:
) > .env_file
- name: Upload env_file
uses: &actions-upload-artifact actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: &actions-upload-artifact actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: env_file
path: ./.env_file

@@ -119,7 +119,7 @@ jobs:
- &download-env-file
name: Download env_file
uses: &actions-download-artifact actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: &actions-download-artifact actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
name: env_file

@@ -136,7 +136,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: &home-assistant-wheels home-assistant/wheels@e5742a69d69f0e274e2689c998900c7d19652c21 # 2025.12.0
uses: &home-assistant-wheels home-assistant/wheels@6066c17a2a4aafcf7bdfeae01717f63adfcdba98 # 2025.11.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2

@@ -187,7 +187,6 @@ homeassistant.components.elkm1.*
homeassistant.components.emulated_hue.*
homeassistant.components.energenie_power_sockets.*
homeassistant.components.energy.*
homeassistant.components.energyid.*
homeassistant.components.energyzero.*
homeassistant.components.enigma2.*
homeassistant.components.enphase_envoy.*
CODEOWNERS (generated, 28 changed lines)
@@ -73,8 +73,6 @@ build.json @home-assistant/supervisor
/tests/components/airobot/ @mettolen
/homeassistant/components/airos/ @CoMPaTech
/tests/components/airos/ @CoMPaTech
/homeassistant/components/airpatrol/ @antondalgren
/tests/components/airpatrol/ @antondalgren
/homeassistant/components/airq/ @Sibgatulin @dl2080
/tests/components/airq/ @Sibgatulin @dl2080
/homeassistant/components/airthings/ @danielhiversen @LaStrada

@@ -220,8 +218,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/bizkaibus/ @UgaitzEtxebarria
/homeassistant/components/blebox/ @bbx-a @swistakm
/tests/components/blebox/ @bbx-a @swistakm
/homeassistant/components/blink/ @fronzbot
/tests/components/blink/ @fronzbot
/homeassistant/components/blink/ @fronzbot @mkmer
/tests/components/blink/ @fronzbot @mkmer
/homeassistant/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
/tests/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
/homeassistant/components/bluemaestro/ @bdraco

@@ -308,8 +306,8 @@ build.json @home-assistant/supervisor
/tests/components/config/ @home-assistant/core
/homeassistant/components/configurator/ @home-assistant/core
/tests/components/configurator/ @home-assistant/core
/homeassistant/components/control4/ @lawtancool @davidrecordon
/tests/components/control4/ @lawtancool @davidrecordon
/homeassistant/components/control4/ @lawtancool
/tests/components/control4/ @lawtancool
/homeassistant/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
/tests/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
/homeassistant/components/cookidoo/ @miaucl

@@ -420,8 +418,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/efergy/ @tkdrob
/tests/components/efergy/ @tkdrob
/homeassistant/components/egardia/ @jeroenterheerdt
/homeassistant/components/egauge/ @neggert
/tests/components/egauge/ @neggert
/homeassistant/components/eheimdigital/ @autinerd
/tests/components/eheimdigital/ @autinerd
/homeassistant/components/ekeybionyx/ @richardpolzer

@@ -456,15 +452,13 @@ build.json @home-assistant/supervisor
/tests/components/energenie_power_sockets/ @gnumpi
/homeassistant/components/energy/ @home-assistant/core
/tests/components/energy/ @home-assistant/core
/homeassistant/components/energyid/ @JrtPec @Molier
/tests/components/energyid/ @JrtPec @Molier
/homeassistant/components/energyzero/ @klaasnicolaas
/tests/components/energyzero/ @klaasnicolaas
/homeassistant/components/enigma2/ @autinerd
/tests/components/enigma2/ @autinerd
/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
/tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
/homeassistant/components/entur_public_transport/ @hfurubotten @SanderBlom
/homeassistant/components/entur_public_transport/ @hfurubotten
/homeassistant/components/environment_canada/ @gwww @michaeldavie
/tests/components/environment_canada/ @gwww @michaeldavie
/homeassistant/components/ephember/ @ttroy50 @roberty99

@@ -543,8 +537,6 @@ build.json @home-assistant/supervisor
/tests/components/freebox/ @hacf-fr @Quentame
/homeassistant/components/freedompro/ @stefano055415
/tests/components/freedompro/ @stefano055415
/homeassistant/components/fressnapf_tracker/ @eifinger
/tests/components/fressnapf_tracker/ @eifinger
/homeassistant/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
/tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
/homeassistant/components/fritzbox/ @mib1185 @flabbamann

@@ -575,8 +567,6 @@ build.json @home-assistant/supervisor
/tests/components/generic_hygrostat/ @Shulyaka
/homeassistant/components/geniushub/ @manzanotti
/tests/components/geniushub/ @manzanotti
/homeassistant/components/gentex_homelink/ @niaexa @ryanjones-gentex
/tests/components/gentex_homelink/ @niaexa @ryanjones-gentex
/homeassistant/components/geo_json_events/ @exxamalte
/tests/components/geo_json_events/ @exxamalte
/homeassistant/components/geo_location/ @home-assistant/core

@@ -665,7 +655,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/here_travel_time/ @eifinger
/tests/components/here_travel_time/ @eifinger
/homeassistant/components/hikvision/ @mezz64
/tests/components/hikvision/ @mezz64
/homeassistant/components/hikvisioncam/ @fbradyirl
/homeassistant/components/hisense_aehw4a1/ @bannhead
/tests/components/hisense_aehw4a1/ @bannhead

@@ -1363,8 +1352,8 @@ build.json @home-assistant/supervisor
/tests/components/ring/ @sdb9696
/homeassistant/components/risco/ @OnFreund
/tests/components/risco/ @OnFreund
/homeassistant/components/rituals_perfume_genie/ @milanmeu @frenck @quebulm
/tests/components/rituals_perfume_genie/ @milanmeu @frenck @quebulm
/homeassistant/components/rituals_perfume_genie/ @milanmeu @frenck
/tests/components/rituals_perfume_genie/ @milanmeu @frenck
/homeassistant/components/rmvtransport/ @cgtobi
/tests/components/rmvtransport/ @cgtobi
/homeassistant/components/roborock/ @Lash-L @allenporter

@@ -1770,7 +1759,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/vilfo/ @ManneW
/tests/components/vilfo/ @ManneW
/homeassistant/components/vivotek/ @HarlemSquirrel
/tests/components/vivotek/ @HarlemSquirrel
/homeassistant/components/vizio/ @raman325
/tests/components/vizio/ @raman325
/homeassistant/components/vlc_telnet/ @rodripf @MartinHjelmare

@@ -1810,8 +1798,6 @@ build.json @home-assistant/supervisor
/tests/components/weatherflow_cloud/ @jeeftor
/homeassistant/components/weatherkit/ @tjhorner
/tests/components/weatherkit/ @tjhorner
/homeassistant/components/web_rtc/ @home-assistant/core
/tests/components/web_rtc/ @home-assistant/core
/homeassistant/components/webdav/ @jpbede
/tests/components/webdav/ @jpbede
/homeassistant/components/webhook/ @home-assistant/core
Dockerfile (generated, 2 changed lines)
@@ -30,7 +30,7 @@ RUN \
# Verify go2rtc can be executed
go2rtc --version \
# Install uv
&& pip3 install uv==0.9.17
&& pip3 install uv==0.9.6
WORKDIR /usr/src

@@ -35,22 +35,25 @@ COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv
USER vscode
COPY .python-version ./
RUN uv python install
ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
RUN --mount=type=bind,source=.python-version,target=.python-version \
uv python install \
&& uv venv $VIRTUAL_ENV
RUN uv venv $VIRTUAL_ENV
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
WORKDIR /tmp
# Setup hass-release
RUN git clone --depth 1 https://github.com/home-assistant/hass-release ~/hass-release \
&& uv pip install -e ~/hass-release/
# Install Python dependencies from requirements
RUN --mount=type=bind,source=requirements.txt,target=requirements.txt \
--mount=type=bind,source=homeassistant/package_constraints.txt,target=homeassistant/package_constraints.txt \
--mount=type=bind,source=requirements_test.txt,target=requirements_test.txt \
--mount=type=bind,source=requirements_test_pre_commit.txt,target=requirements_test_pre_commit.txt \
uv pip install -r requirements.txt -r requirements_test.txt
COPY requirements.txt ./
COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
RUN uv pip install -r requirements.txt
COPY requirements_test.txt requirements_test_pre_commit.txt ./
RUN uv pip install -r requirements_test.txt
WORKDIR /workspaces
@@ -7,7 +7,6 @@ from typing import Any, Final
from homeassistant.const import (
EVENT_COMPONENT_LOADED,
EVENT_CORE_CONFIG_UPDATE,
EVENT_LABS_UPDATED,
EVENT_LOVELACE_UPDATED,
EVENT_PANELS_UPDATED,
EVENT_RECORDER_5MIN_STATISTICS_GENERATED,

@@ -46,7 +45,6 @@ SUBSCRIBE_ALLOWLIST: Final[set[EventType[Any] | str]] = {
EVENT_STATE_CHANGED,
EVENT_THEMES_UPDATED,
EVENT_LABEL_REGISTRY_UPDATED,
EVENT_LABS_UPDATED,
EVENT_CATEGORY_REGISTRY_UPDATED,
EVENT_FLOOR_REGISTRY_UPDATED,
}
@@ -624,16 +624,13 @@ async def async_enable_logging(
if log_file is None:
default_log_path = hass.config.path(ERROR_LOG_FILENAME)
if "SUPERVISOR" in os.environ and "HA_DUPLICATE_LOG_FILE" not in os.environ:
if "SUPERVISOR" in os.environ:
_LOGGER.info("Running in Supervisor, not logging to file")
# Rename the default log file if it exists, since previous versions created
# it even on Supervisor
def rename_old_file() -> None:
"""Rename old log file in executor."""
if os.path.isfile(default_log_path):
with contextlib.suppress(OSError):
os.rename(default_log_path, f"{default_log_path}.old")
await hass.async_add_executor_job(rename_old_file)
if os.path.isfile(default_log_path):
with contextlib.suppress(OSError):
os.rename(default_log_path, f"{default_log_path}.old")
err_log_path = None
else:
err_log_path = default_log_path

@@ -1003,7 +1000,7 @@ class _WatchPendingSetups:
# We log every LOG_SLOW_STARTUP_INTERVAL until all integrations are done
# once we take over LOG_SLOW_STARTUP_INTERVAL (60s) to start up
_LOGGER.warning(
"Waiting for integrations to complete setup: %s",
"Waiting on integrations to complete setup: %s",
self._setup_started,
)
@@ -9,9 +9,8 @@ from actron_neo_api import (
from homeassistant.const import CONF_API_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from .const import _LOGGER, DOMAIN
from .const import _LOGGER
from .coordinator import (
ActronAirConfigEntry,
ActronAirRuntimeData,

@@ -30,13 +29,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) ->
try:
systems = await api.get_ac_systems()
await api.update_status()
except ActronAirAuthError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="auth_error",
) from err
except ActronAirAuthError:
_LOGGER.error("Authentication error while setting up Actron Air integration")
raise
except ActronAirAPIError as err:
raise ConfigEntryNotReady from err
_LOGGER.error("API error while setting up Actron Air integration: %s", err)
raise
system_coordinators: dict[str, ActronAirSystemCoordinator] = {}
for system in systems:
@@ -1,12 +1,11 @@
"""Setup config flow for Actron Air integration."""
import asyncio
from collections.abc import Mapping
from typing import Any
from actron_neo_api import ActronAirAPI, ActronAirAuthError
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_TOKEN
from homeassistant.exceptions import HomeAssistantError

@@ -96,16 +95,8 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
unique_id = str(user_data["id"])
await self.async_set_unique_id(unique_id)
# Check if this is a reauth flow
if self.source == SOURCE_REAUTH:
self._abort_if_unique_id_mismatch(reason="wrong_account")
return self.async_update_reload_and_abort(
self._get_reauth_entry(),
data_updates={CONF_API_TOKEN: self._api.refresh_token_value},
)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=user_data["email"],
data={CONF_API_TOKEN: self._api.refresh_token_value},

@@ -123,21 +114,6 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
del self.login_task
return await self.async_step_user()
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauthentication request."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauth dialog."""
if user_input is not None:
return await self.async_step_user()
return self.async_show_form(step_id="reauth_confirm")
async def async_step_connection_error(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -5,23 +5,16 @@ from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta
from actron_neo_api import (
ActronAirACSystem,
ActronAirAPI,
ActronAirAuthError,
ActronAirStatus,
)
from actron_neo_api import ActronAirACSystem, ActronAirAPI, ActronAirStatus
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.util import dt as dt_util
from .const import _LOGGER, DOMAIN
from .const import _LOGGER
SCAN_INTERVAL = timedelta(seconds=30)
STALE_DEVICE_TIMEOUT = timedelta(minutes=5)
STALE_DEVICE_TIMEOUT = timedelta(hours=24)
ERROR_NO_SYSTEMS_FOUND = "no_systems_found"
ERROR_UNKNOWN = "unknown_error"

@@ -36,6 +29,9 @@ class ActronAirRuntimeData:
type ActronAirConfigEntry = ConfigEntry[ActronAirRuntimeData]
AUTH_ERROR_THRESHOLD = 3
SCAN_INTERVAL = timedelta(seconds=30)
class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
"""System coordinator for Actron Air integration."""

@@ -63,14 +59,7 @@ class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
async def _async_update_data(self) -> ActronAirStatus:
"""Fetch updates and merge incremental changes into the full state."""
try:
await self.api.update_status()
except ActronAirAuthError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="auth_error",
) from err
await self.api.update_status()
self.status = self.api.state_manager.get_status(self.serial_number)
self.last_seen = dt_util.utcnow()
return self.status
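The coordinator hunk above wraps the Actron Air status poll in a try/except so that an authentication failure raises ConfigEntryAuthFailed with a translation key, which is what tells Home Assistant to start a reauthentication flow for the config entry. A minimal, hedged sketch of that pattern, with a hypothetical client and placeholder exception classes standing in for the actron_neo_api library (names here are illustrative, not the integration's actual code):

```python
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

DOMAIN = "example"  # placeholder integration domain


class ClientAuthError(Exception):
    """Stand-in for the client library's authentication error."""


class ClientConnectionError(Exception):
    """Stand-in for the client library's connection error."""


class ExampleCoordinator(DataUpdateCoordinator):
    """Sketch of the auth-handling pattern shown in the hunk above."""

    async def _async_update_data(self):
        try:
            return await self.client.fetch_status()  # hypothetical client call
        except ClientAuthError as err:
            # Raising ConfigEntryAuthFailed flags the config entry for reauth,
            # which is what triggers the reauthentication flow in the UI.
            raise ConfigEntryAuthFailed(
                translation_domain=DOMAIN,
                translation_key="auth_error",
            ) from err
        except ClientConnectionError as err:
            # Transient problems are reported as an ordinary failed update.
            raise UpdateFailed(f"Error communicating with API: {err}") from err
```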
@@ -10,8 +10,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/actron_air",
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["actron-neo-api==0.2.0"]
"requirements": ["actron-neo-api==0.1.87"]
}
@@ -36,7 +36,7 @@ rules:
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
reauthentication-flow: todo
test-coverage: todo
# Gold
@@ -2,12 +2,10 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"oauth2_error": "Failed to start authentication flow",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"wrong_account": "You must reauthenticate with the same Actron Air account that was originally configured."
"oauth2_error": "Failed to start OAuth2 flow"
},
"error": {
"oauth2_error": "Failed to start authentication flow. Please try again later."
"oauth2_error": "Failed to start OAuth2 flow. Please try again later."
},
"progress": {
"wait_for_authorization": "To authenticate, open the following URL and login at Actron Air:\n{verification_uri}\nIf the code is not automatically copied, paste the following code to authorize the integration:\n\n```{user_code}```\n\n\nThe login attempt will time out after {expires_minutes} minutes."

@@ -18,23 +16,14 @@
"description": "Failed to connect to Actron Air. Please check your internet connection and try again.",
"title": "Connection error"
},
"reauth_confirm": {
"description": "Your Actron Air authentication has expired. Select continue to reauthenticate with your Actron Air account. You will be prompted to log in again to restore the connection.",
"title": "Authentication expired"
},
"timeout": {
"data": {},
"description": "The authentication process timed out. Please try again.",
"title": "Authentication timeout"
"description": "The authorization process timed out. Please try again.",
"title": "Authorization timeout"
},
"user": {
"title": "Actron Air Authentication"
"title": "Actron Air OAuth2 Authorization"
}
}
},
"exceptions": {
"auth_error": {
"message": "Authentication failed, please reauthenticate"
}
}
}
@@ -4,7 +4,6 @@
"codeowners": ["@Bre77"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/advantage_air",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["advantage_air"],
"requirements": ["advantage-air==0.4.4"]
@@ -4,7 +4,6 @@
"codeowners": ["@Noltari"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aemet",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["aemet_opendata"],
"requirements": ["AEMET-OpenData==0.6.4"]
@@ -4,7 +4,6 @@
"codeowners": [],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aftership",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["pyaftership==21.11.0"]
}
@@ -4,7 +4,6 @@
"codeowners": ["@ispysoftware"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/agent_dvr",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["agent"],
"requirements": ["agent-py==0.0.24"]
@@ -101,8 +101,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
vol.Schema({str: STRUCTURE_FIELD_SCHEMA}),
_validate_structure_fields,
),
vol.Optional(ATTR_ATTACHMENTS): selector.MediaSelector(
{"accept": ["*/*"], "multiple": True}
vol.Optional(ATTR_ATTACHMENTS): vol.All(
cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
),
}
),

@@ -118,8 +118,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
vol.Required(ATTR_TASK_NAME): cv.string,
vol.Optional(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_INSTRUCTIONS): cv.string,
vol.Optional(ATTR_ATTACHMENTS): selector.MediaSelector(
{"accept": ["*/*"], "multiple": True}
vol.Optional(ATTR_ATTACHMENTS): vol.All(
cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
),
}
),
@@ -4,7 +4,6 @@
"codeowners": ["@asymworks"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/airnow",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["pyairnow"],
"requirements": ["pyairnow==1.3.1"]
@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant
from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator
PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.SENSOR]
PLATFORMS: list[Platform] = [Platform.CLIMATE]
async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
@@ -2,7 +2,6 @@
from __future__ import annotations
from collections.abc import Mapping
from dataclasses import dataclass
import logging
from typing import Any

@@ -175,56 +174,6 @@ class AirobotConfigFlow(BaseConfigFlow, domain=DOMAIN):
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauthentication upon an API authentication error."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauthentication dialog."""
errors: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
if user_input is not None:
# Combine existing data with new password
data = {
CONF_HOST: reauth_entry.data[CONF_HOST],
CONF_USERNAME: reauth_entry.data[CONF_USERNAME],
CONF_PASSWORD: user_input[CONF_PASSWORD],
}
try:
await validate_input(self.hass, data)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
reauth_entry,
data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]},
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Required(CONF_PASSWORD): str,
}
),
description_placeholders={
"username": reauth_entry.data[CONF_USERNAME],
"host": reauth_entry.data[CONF_HOST],
},
errors=errors,
)
class CannotConnect(HomeAssistantError):
"""Error to indicate we cannot connect."""
@@ -11,7 +11,6 @@ from pyairobotrest.exceptions import AirobotAuthError, AirobotConnectionError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -54,15 +53,7 @@ class AirobotDataUpdateCoordinator(DataUpdateCoordinator[AirobotData]):
try:
status = await self.client.get_statuses()
settings = await self.client.get_settings()
except AirobotAuthError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="authentication_failed",
) from err
except AirobotConnectionError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="connection_failed",
) from err
except (AirobotAuthError, AirobotConnectionError) as err:
raise UpdateFailed(f"Failed to communicate with device: {err}") from err
return AirobotData(status=status, settings=settings)
@@ -1,38 +0,0 @@
"""Diagnostics support for Airobot."""
from __future__ import annotations
from dataclasses import asdict
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from .coordinator import AirobotConfigEntry
TO_REDACT_CONFIG = [CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_USERNAME]
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: AirobotConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator = entry.runtime_data
# Build device capabilities info
device_capabilities = None
if coordinator.data:
device_capabilities = {
"has_floor_sensor": coordinator.data.status.has_floor_sensor,
"has_co2_sensor": coordinator.data.status.has_co2_sensor,
"hw_version": coordinator.data.status.hw_version,
"fw_version": coordinator.data.status.fw_version,
}
return {
"entry_data": async_redact_data(entry.data, TO_REDACT_CONFIG),
"device_capabilities": device_capabilities,
"status": asdict(coordinator.data.status) if coordinator.data else None,
"settings": asdict(coordinator.data.settings) if coordinator.data else None,
}
@@ -12,6 +12,6 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pyairobotrest"],
"quality_scale": "silver",
"quality_scale": "bronze",
"requirements": ["pyairobotrest==0.1.0"]
}
@@ -34,17 +34,17 @@ rules:
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
reauthentication-flow: todo
test-coverage: done
# Gold
devices: done
diagnostics: done
diagnostics: todo
discovery-update-info: done
discovery: done
docs-data-update: done
docs-examples: todo
docs-known-limitations: done
docs-known-limitations: todo
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done

@@ -54,8 +54,8 @@ rules:
comment: Single device integration, no dynamic device discovery needed.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
entity-disabled-by-default: todo
entity-translations: todo
exception-translations: done
icon-translations: todo
reconfiguration-flow: todo
@@ -1,150 +0,0 @@
"""Sensor platform for Airobot thermostat."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime, timedelta
from pyairobotrest.models import ThermostatStatus
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import (
CONCENTRATION_PARTS_PER_MILLION,
PERCENTAGE,
EntityCategory,
UnitOfTemperature,
UnitOfTime,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.util.dt import utcnow
from homeassistant.util.variance import ignore_variance
from . import AirobotConfigEntry
from .entity import AirobotEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class AirobotSensorEntityDescription(SensorEntityDescription):
"""Describes Airobot sensor entity."""
value_fn: Callable[[ThermostatStatus], StateType | datetime]
supported_fn: Callable[[ThermostatStatus], bool] = lambda _: True
uptime_to_stable_datetime = ignore_variance(
lambda value: utcnow().replace(microsecond=0) - timedelta(seconds=value),
timedelta(minutes=2),
)
SENSOR_TYPES: tuple[AirobotSensorEntityDescription, ...] = (
AirobotSensorEntityDescription(
key="air_temperature",
translation_key="air_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda status: status.temp_air,
),
AirobotSensorEntityDescription(
key="humidity",
device_class=SensorDeviceClass.HUMIDITY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda status: status.hum_air,
),
AirobotSensorEntityDescription(
key="floor_temperature",
translation_key="floor_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda status: status.temp_floor,
supported_fn=lambda status: status.has_floor_sensor,
),
AirobotSensorEntityDescription(
key="co2",
device_class=SensorDeviceClass.CO2,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda status: status.co2,
supported_fn=lambda status: status.has_co2_sensor,
),
AirobotSensorEntityDescription(
key="air_quality_index",
device_class=SensorDeviceClass.AQI,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda status: status.aqi,
supported_fn=lambda status: status.has_co2_sensor,
),
AirobotSensorEntityDescription(
key="heating_uptime",
translation_key="heating_uptime",
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.SECONDS,
suggested_unit_of_measurement=UnitOfTime.HOURS,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda status: status.heating_uptime,
entity_registry_enabled_default=False,
),
AirobotSensorEntityDescription(
key="errors",
translation_key="errors",
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda status: status.errors,
),
AirobotSensorEntityDescription(
key="device_uptime",
translation_key="device_uptime",
device_class=SensorDeviceClass.TIMESTAMP,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda status: uptime_to_stable_datetime(status.device_uptime),
entity_registry_enabled_default=False,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AirobotConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Airobot sensor platform."""
coordinator = entry.runtime_data
async_add_entities(
AirobotSensor(coordinator, description)
for description in SENSOR_TYPES
if description.supported_fn(coordinator.data.status)
)
class AirobotSensor(AirobotEntity, SensorEntity):
"""Representation of an Airobot sensor."""
entity_description: AirobotSensorEntityDescription
def __init__(
self,
coordinator,
description: AirobotSensorEntityDescription,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.data.status.device_id}_{description.key}"
@property
def native_value(self) -> StateType | datetime:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.coordinator.data.status)
@@ -1,8 +1,7 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -15,24 +14,15 @@
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "password": "[%key:component::airobot::config::step::user::data_description::password%]"
          "password": "The thermostat password."
        },
        "description": "Airobot thermostat {device_id} discovered at {host}. Enter the password to complete setup. Find the password in the thermostat settings menu under Connectivity → Mobile app."
      },
      "reauth_confirm": {
        "data": {
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "password": "[%key:component::airobot::config::step::user::data_description::password%]"
        },
        "description": "The authentication for Airobot thermostat at {host} (Device ID: {username}) has expired. Please enter the password to reauthenticate. Find the password in the thermostat settings menu under Connectivity → Mobile app."
      },
      "user": {
        "data": {
          "host": "[%key:common::config_flow::data::host%]",
          "password": "[%key:common::config_flow::data::password%]",
          "username": "Device ID"
          "username": "[%key:common::config_flow::data::username%]"
        },
        "data_description": {
          "host": "The hostname or IP address of your Airobot thermostat.",
@@ -43,32 +33,7 @@
      }
    }
  },
  "entity": {
    "sensor": {
      "air_temperature": {
        "name": "Air temperature"
      },
      "device_uptime": {
        "name": "Device uptime"
      },
      "errors": {
        "name": "Error count"
      },
      "floor_temperature": {
        "name": "Floor temperature"
      },
      "heating_uptime": {
        "name": "Heating uptime"
      }
    }
  },
  "exceptions": {
    "authentication_failed": {
      "message": "Authentication failed, please reauthenticate."
    },
    "connection_failed": {
      "message": "Failed to communicate with device."
    },
    "set_preset_mode_failed": {
      "message": "Failed to set preset mode to {preset_mode}."
    },

@@ -1,24 +0,0 @@
"""The AirPatrol integration."""

from __future__ import annotations

from homeassistant.core import HomeAssistant

from .const import PLATFORMS
from .coordinator import AirPatrolConfigEntry, AirPatrolDataUpdateCoordinator


async def async_setup_entry(hass: HomeAssistant, entry: AirPatrolConfigEntry) -> bool:
    """Set up AirPatrol from a config entry."""
    coordinator = AirPatrolDataUpdateCoordinator(hass, entry)

    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: AirPatrolConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

@@ -1,208 +0,0 @@
"""Climate platform for AirPatrol integration."""

from __future__ import annotations

from typing import Any

from homeassistant.components.climate import (
    FAN_AUTO,
    FAN_HIGH,
    FAN_LOW,
    SWING_OFF,
    SWING_ON,
    ClimateEntity,
    ClimateEntityFeature,
    HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AirPatrolConfigEntry
from .coordinator import AirPatrolDataUpdateCoordinator
from .entity import AirPatrolEntity

PARALLEL_UPDATES = 0

AP_TO_HA_HVAC_MODES = {
    "heat": HVACMode.HEAT,
    "cool": HVACMode.COOL,
    "off": HVACMode.OFF,
}
HA_TO_AP_HVAC_MODES = {value: key for key, value in AP_TO_HA_HVAC_MODES.items()}

AP_TO_HA_FAN_MODES = {
    "min": FAN_LOW,
    "max": FAN_HIGH,
    "auto": FAN_AUTO,
}
HA_TO_AP_FAN_MODES = {value: key for key, value in AP_TO_HA_FAN_MODES.items()}

AP_TO_HA_SWING_MODES = {
    "on": SWING_ON,
    "off": SWING_OFF,
}
HA_TO_AP_SWING_MODES = {value: key for key, value in AP_TO_HA_SWING_MODES.items()}


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: AirPatrolConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up AirPatrol climate entities."""
    coordinator = config_entry.runtime_data
    units = coordinator.data

    async_add_entities(
        AirPatrolClimate(coordinator, unit_id)
        for unit_id, unit in units.items()
        if "climate" in unit
    )


class AirPatrolClimate(AirPatrolEntity, ClimateEntity):
    """AirPatrol climate entity."""

    _attr_name = None
    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _attr_supported_features = (
        ClimateEntityFeature.TARGET_TEMPERATURE
        | ClimateEntityFeature.FAN_MODE
        | ClimateEntityFeature.SWING_MODE
        | ClimateEntityFeature.TURN_OFF
        | ClimateEntityFeature.TURN_ON
    )
    _attr_hvac_modes = [HVACMode.HEAT, HVACMode.COOL, HVACMode.OFF]
    _attr_fan_modes = [FAN_LOW, FAN_HIGH, FAN_AUTO]
    _attr_swing_modes = [SWING_ON, SWING_OFF]
    _attr_min_temp = 16.0
    _attr_max_temp = 30.0

    def __init__(
        self,
        coordinator: AirPatrolDataUpdateCoordinator,
        unit_id: str,
    ) -> None:
        """Initialize the climate entity."""
        super().__init__(coordinator, unit_id)
        self._attr_unique_id = f"{coordinator.config_entry.unique_id}-{unit_id}"

    @property
    def climate_data(self) -> dict[str, Any]:
        """Return the climate data."""
        return self.device_data.get("climate") or {}

    @property
    def params(self) -> dict[str, Any]:
        """Return the current parameters for the climate entity."""
        return self.climate_data.get("ParametersData") or {}

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        return super().available and bool(self.climate_data)

    @property
    def current_humidity(self) -> float | None:
        """Return the current humidity."""
        if humidity := self.climate_data.get("RoomHumidity"):
            return float(humidity)
        return None

    @property
    def current_temperature(self) -> float | None:
        """Return the current temperature."""
        if temp := self.climate_data.get("RoomTemp"):
            return float(temp)
        return None

    @property
    def target_temperature(self) -> float | None:
        """Return the target temperature."""
        if temp := self.params.get("PumpTemp"):
            return float(temp)
        return None

    @property
    def hvac_mode(self) -> HVACMode | None:
        """Return the current HVAC mode."""
        pump_power = self.params.get("PumpPower")
        pump_mode = self.params.get("PumpMode")

        if pump_power and pump_power == "on" and pump_mode:
            return AP_TO_HA_HVAC_MODES.get(pump_mode)
        return HVACMode.OFF

    @property
    def fan_mode(self) -> str | None:
        """Return the current fan mode."""
        fan_speed = self.params.get("FanSpeed")
        if fan_speed:
            return AP_TO_HA_FAN_MODES.get(fan_speed)
        return None

    @property
    def swing_mode(self) -> str | None:
        """Return the current swing mode."""
        swing = self.params.get("Swing")
        if swing:
            return AP_TO_HA_SWING_MODES.get(swing)
        return None

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new target temperature."""
        params = self.params.copy()

        if ATTR_TEMPERATURE in kwargs:
            temp = kwargs[ATTR_TEMPERATURE]
            params["PumpTemp"] = f"{temp:.3f}"

        await self._async_set_params(params)

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set new target hvac mode."""
        params = self.params.copy()

        if hvac_mode == HVACMode.OFF:
            params["PumpPower"] = "off"
        else:
            params["PumpPower"] = "on"
            params["PumpMode"] = HA_TO_AP_HVAC_MODES.get(hvac_mode)

        await self._async_set_params(params)

    async def async_set_fan_mode(self, fan_mode: str) -> None:
        """Set new target fan mode."""
        params = self.params.copy()
        params["FanSpeed"] = HA_TO_AP_FAN_MODES.get(fan_mode)

        await self._async_set_params(params)

    async def async_set_swing_mode(self, swing_mode: str) -> None:
        """Set new target swing mode."""
        params = self.params.copy()
        params["Swing"] = HA_TO_AP_SWING_MODES.get(swing_mode)

        await self._async_set_params(params)

    async def async_turn_on(self) -> None:
        """Turn the entity on."""
        params = self.params.copy()
        if mode := AP_TO_HA_HVAC_MODES.get(params["PumpMode"]):
            await self.async_set_hvac_mode(mode)

    async def async_turn_off(self) -> None:
        """Turn the entity off."""
        await self.async_set_hvac_mode(HVACMode.OFF)

    async def _async_set_params(self, params: dict[str, Any]) -> None:
        """Send updated climate parameters to the unit."""
        new_climate_data = self.climate_data.copy()
        new_climate_data["ParametersData"] = params

        await self.coordinator.api.set_unit_climate_data(
            self._unit_id, new_climate_data
        )

        await self.coordinator.async_request_refresh()

@@ -1,111 +0,0 @@
"""Config flow for the AirPatrol integration."""

from __future__ import annotations

from collections.abc import Mapping
from typing import Any

from airpatrol.api import AirPatrolAPI, AirPatrolAuthenticationError, AirPatrolError
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
    TextSelector,
    TextSelectorConfig,
    TextSelectorType,
)

from .const import DOMAIN

DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_EMAIL): TextSelector(
            TextSelectorConfig(
                type=TextSelectorType.EMAIL,
                autocomplete="email",
            )
        ),
        vol.Required(CONF_PASSWORD): TextSelector(
            TextSelectorConfig(
                type=TextSelectorType.PASSWORD,
                autocomplete="current-password",
            )
        ),
    }
)


async def validate_api(
    hass: HomeAssistant, user_input: dict[str, str]
) -> tuple[str | None, str | None, dict[str, str]]:
    """Validate the API connection."""
    errors: dict[str, str] = {}
    session = async_get_clientsession(hass)
    access_token = None
    unique_id = None
    try:
        api = await AirPatrolAPI.authenticate(
            session, user_input[CONF_EMAIL], user_input[CONF_PASSWORD]
        )
    except AirPatrolAuthenticationError:
        errors["base"] = "invalid_auth"
    except AirPatrolError:
        errors["base"] = "cannot_connect"
    else:
        access_token = api.get_access_token()
        unique_id = api.get_unique_id()

    return (access_token, unique_id, errors)


class AirPatrolConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for AirPatrol."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            access_token, unique_id, errors = await validate_api(self.hass, user_input)
            if access_token and unique_id:
                user_input[CONF_ACCESS_TOKEN] = access_token
                await self.async_set_unique_id(unique_id)
                self._abort_if_unique_id_configured()
                return self.async_create_entry(
                    title=user_input[CONF_EMAIL], data=user_input
                )

        return self.async_show_form(
            step_id="user", data_schema=DATA_SCHEMA, errors=errors
        )

    async def async_step_reauth(
        self, user_input: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle reauthentication with new credentials."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle reauthentication confirmation."""
        errors: dict[str, str] = {}

        if user_input:
            access_token, unique_id, errors = await validate_api(self.hass, user_input)
            if access_token and unique_id:
                await self.async_set_unique_id(unique_id)
                self._abort_if_unique_id_mismatch()
                user_input[CONF_ACCESS_TOKEN] = access_token
                return self.async_update_reload_and_abort(
                    self._get_reauth_entry(), data_updates=user_input
                )
        return self.async_show_form(
            step_id="reauth_confirm", data_schema=DATA_SCHEMA, errors=errors
        )

@@ -1,16 +0,0 @@
"""Constants for the AirPatrol integration."""

from datetime import timedelta
import logging

from airpatrol.api import AirPatrolAuthenticationError, AirPatrolError

from homeassistant.const import Platform

DOMAIN = "airpatrol"

LOGGER = logging.getLogger(__package__)
PLATFORMS = [Platform.CLIMATE]
SCAN_INTERVAL = timedelta(minutes=1)

AIRPATROL_ERRORS = (AirPatrolAuthenticationError, AirPatrolError)

@@ -1,100 +0,0 @@
"""Data update coordinator for AirPatrol."""

from __future__ import annotations

from typing import Any

from airpatrol.api import AirPatrolAPI, AirPatrolAuthenticationError, AirPatrolError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, LOGGER, SCAN_INTERVAL

type AirPatrolConfigEntry = ConfigEntry[AirPatrolDataUpdateCoordinator]


class AirPatrolDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]):
    """Class to manage fetching AirPatrol data."""

    config_entry: AirPatrolConfigEntry
    api: AirPatrolAPI

    def __init__(self, hass: HomeAssistant, config_entry: AirPatrolConfigEntry) -> None:
        """Initialize."""

        super().__init__(
            hass,
            LOGGER,
            name=f"{DOMAIN.capitalize()} {config_entry.title}",
            update_interval=SCAN_INTERVAL,
            config_entry=config_entry,
        )

    async def _async_setup(self) -> None:
        try:
            await self._setup_client()
        except AirPatrolError as api_err:
            raise UpdateFailed(
                f"Error communicating with AirPatrol API: {api_err}"
            ) from api_err

    async def _async_update_data(self) -> dict[str, dict[str, Any]]:
        """Update unit data from AirPatrol API."""
        return {unit_data["unit_id"]: unit_data for unit_data in await self._get_data()}

    async def _get_data(self, retry: bool = False) -> list[dict[str, Any]]:
        """Fetch data from API."""
        try:
            return await self.api.get_data()
        except AirPatrolAuthenticationError as auth_err:
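            # On an authentication error the coordinator refreshes the token once and
            # retries; if the retry fails as well, ConfigEntryAuthFailed (below) is
            # raised so Home Assistant starts a reauthentication flow.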
            if retry:
                raise ConfigEntryAuthFailed(
                    "Authentication with AirPatrol failed"
                ) from auth_err
            await self._update_token()
            return await self._get_data(retry=True)
        except AirPatrolError as err:
            raise UpdateFailed(
                f"Error communicating with AirPatrol API: {err}"
            ) from err

    async def _update_token(self) -> None:
        """Refresh the AirPatrol API client and update the access token."""
        session = async_get_clientsession(self.hass)
        try:
            self.api = await AirPatrolAPI.authenticate(
                session,
                self.config_entry.data[CONF_EMAIL],
                self.config_entry.data[CONF_PASSWORD],
            )
        except AirPatrolAuthenticationError as auth_err:
            raise ConfigEntryAuthFailed(
                "Authentication with AirPatrol failed"
            ) from auth_err

        self.hass.config_entries.async_update_entry(
            self.config_entry,
            data={
                **self.config_entry.data,
                CONF_ACCESS_TOKEN: self.api.get_access_token(),
            },
        )

    async def _setup_client(self) -> None:
        """Set up the AirPatrol API client from stored access_token."""
        session = async_get_clientsession(self.hass)
        api = AirPatrolAPI(
            session,
            self.config_entry.data[CONF_ACCESS_TOKEN],
            self.config_entry.unique_id,
        )
        try:
            await api.get_data()
        except AirPatrolAuthenticationError:
            await self._update_token()
        self.api = api

@@ -1,44 +0,0 @@
"""Base entity for AirPatrol integration."""

from __future__ import annotations

from typing import Any

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import AirPatrolDataUpdateCoordinator


class AirPatrolEntity(CoordinatorEntity[AirPatrolDataUpdateCoordinator]):
    """Base entity for AirPatrol devices."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: AirPatrolDataUpdateCoordinator,
        unit_id: str,
    ) -> None:
        """Initialize the AirPatrol entity."""
        super().__init__(coordinator)
        self._unit_id = unit_id
        device = coordinator.data[unit_id]
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, unit_id)},
            name=device["name"],
            manufacturer=device["manufacturer"],
            model=device["model"],
            serial_number=device["hwid"],
        )

    @property
    def device_data(self) -> dict[str, Any]:
        """Return the device data."""
        return self.coordinator.data[self._unit_id]

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        return super().available and self._unit_id in self.coordinator.data

@@ -1,11 +0,0 @@
{
  "domain": "airpatrol",
  "name": "AirPatrol",
  "codeowners": ["@antondalgren"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/airpatrol",
  "integration_type": "device",
  "iot_class": "cloud_polling",
  "quality_scale": "bronze",
  "requirements": ["airpatrol==0.1.0"]
}

@@ -1,65 +0,0 @@
rules:
  # Bronze
  action-setup: done
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: Integration does not provide custom actions
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      Entities don't subscribe to events.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions: done
  config-entry-unloading: done
  docs-configuration-parameters: done
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: todo
  parallel-updates: done
  reauthentication-flow: done
  test-coverage: done

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info: todo
  discovery: todo
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices: todo
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default: todo
  entity-translations: done
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow: todo
  repair-issues: todo
  stale-devices: todo

  # Platinum
  async-dependency: todo
  inject-websession: todo
  strict-typing: todo

@@ -1,38 +0,0 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "unique_id_mismatch": "Login credentials do not match the configured account"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "reauth_confirm": {
        "data": {
          "email": "[%key:common::config_flow::data::email%]",
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "email": "[%key:component::airpatrol::config::step::user::data_description::email%]",
          "password": "[%key:component::airpatrol::config::step::user::data_description::password%]"
        },
        "description": "Reauthenticate with AirPatrol"
      },
      "user": {
        "data": {
          "email": "[%key:common::config_flow::data::email%]",
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "email": "Your AirPatrol email address",
          "password": "Your AirPatrol password"
        },
        "description": "Connect to AirPatrol"
      }
    }
  }
}

@@ -17,7 +17,6 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/airthings",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["airthings"],
  "requirements": ["airthings-cloud==0.2.0"]

@@ -27,7 +27,6 @@
  "config_flow": true,
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/airthings_ble",
  "integration_type": "device",
  "iot_class": "local_polling",
  "requirements": ["airthings-ble==1.2.0"]
}

@@ -4,7 +4,6 @@
  "codeowners": ["@samsinnamon"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/airtouch4",
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["airtouch4pyapi"],
  "requirements": ["airtouch4pyapi==1.0.5"]

@@ -4,7 +4,6 @@
  "codeowners": ["@danzel"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/airtouch5",
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["airtouch5py"],
  "requirements": ["airtouch5py==0.3.0"]

@@ -9,8 +9,7 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/airzone",
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["aioairzone"],
  "requirements": ["aioairzone==1.0.4"]
  "requirements": ["aioairzone==1.0.2"]
}

@@ -4,7 +4,6 @@
  "codeowners": ["@Noltari"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "loggers": ["aioairzone_cloud"],
  "requirements": ["aioairzone-cloud==0.7.2"]

@@ -159,74 +159,81 @@
|
||||
"title": "Alarm control panel",
|
||||
"triggers": {
|
||||
"armed": {
|
||||
"description": "Triggers after one or more alarms become armed, regardless of the mode.",
|
||||
"description": "Triggers when an alarm is armed.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm armed"
|
||||
"name": "When an alarm is armed"
|
||||
},
|
||||
"armed_away": {
|
||||
"description": "Triggers after one or more alarms become armed in away mode.",
|
||||
"description": "Triggers when an alarm is armed away.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_away::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm armed away"
|
||||
"name": "When an alarm is armed away"
|
||||
},
|
||||
"armed_home": {
|
||||
"description": "Triggers after one or more alarms become armed in home mode.",
|
||||
"description": "Triggers when an alarm is armed home.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_home::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm armed home"
|
||||
"name": "When an alarm is armed home"
|
||||
},
|
||||
"armed_night": {
|
||||
"description": "Triggers after one or more alarms become armed in night mode.",
|
||||
"description": "Triggers when an alarm is armed night.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_night::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm armed night"
|
||||
"name": "When an alarm is armed night"
|
||||
},
|
||||
"armed_vacation": {
|
||||
"description": "Triggers after one or more alarms become armed in vacation mode.",
|
||||
"description": "Triggers when an alarm is armed vacation.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_vacation::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm armed vacation"
|
||||
"name": "When an alarm is armed vacation"
|
||||
},
|
||||
"disarmed": {
|
||||
"description": "Triggers after one or more alarms become disarmed.",
|
||||
"description": "Triggers when an alarm is disarmed.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::disarmed::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm disarmed"
|
||||
"name": "When an alarm is disarmed"
|
||||
},
|
||||
"triggered": {
|
||||
"description": "Triggers after one or more alarms become triggered.",
|
||||
"description": "Triggers when an alarm is triggered.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::triggered::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm triggered"
|
||||
"name": "When an alarm is triggered"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,10 +4,10 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity import get_supported_features
|
||||
from homeassistant.helpers.trigger import (
|
||||
EntityTargetStateTriggerBase,
|
||||
EntityStateTriggerBase,
|
||||
Trigger,
|
||||
make_entity_target_state_trigger,
|
||||
make_entity_transition_trigger,
|
||||
make_conditional_entity_state_trigger,
|
||||
make_entity_state_trigger,
|
||||
)
|
||||
|
||||
from .const import DOMAIN, AlarmControlPanelEntityFeature, AlarmControlPanelState
|
||||
@@ -21,7 +21,7 @@ def supports_feature(hass: HomeAssistant, entity_id: str, features: int) -> bool
|
||||
return False
|
||||
|
||||
|
||||
class EntityStateTriggerRequiredFeatures(EntityTargetStateTriggerBase):
|
||||
class EntityStateTriggerRequiredFeatures(EntityStateTriggerBase):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_required_features: int
|
||||
@@ -38,7 +38,7 @@ class EntityStateTriggerRequiredFeatures(EntityTargetStateTriggerBase):
|
||||
|
||||
def make_entity_state_trigger_required_features(
|
||||
domain: str, to_state: str, required_features: int
|
||||
) -> type[EntityTargetStateTriggerBase]:
|
||||
) -> type[EntityStateTriggerBase]:
|
||||
"""Create an entity state trigger class."""
|
||||
|
||||
class CustomTrigger(EntityStateTriggerRequiredFeatures):
|
||||
@@ -52,7 +52,7 @@ def make_entity_state_trigger_required_features(
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"armed": make_entity_transition_trigger(
|
||||
"armed": make_conditional_entity_state_trigger(
|
||||
DOMAIN,
|
||||
from_states={
|
||||
AlarmControlPanelState.ARMING,
|
||||
@@ -89,12 +89,8 @@ TRIGGERS: dict[str, type[Trigger]] = {
|
||||
AlarmControlPanelState.ARMED_VACATION,
|
||||
AlarmControlPanelEntityFeature.ARM_VACATION,
|
||||
),
|
||||
"disarmed": make_entity_target_state_trigger(
|
||||
DOMAIN, AlarmControlPanelState.DISARMED
|
||||
),
|
||||
"triggered": make_entity_target_state_trigger(
|
||||
DOMAIN, AlarmControlPanelState.TRIGGERED
|
||||
),
|
||||
"disarmed": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.DISARMED),
|
||||
"triggered": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.TRIGGERED),
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
|
||||
|
||||
from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN
|
||||
|
||||
SCAN_INTERVAL = 300
|
||||
SCAN_INTERVAL = 30
|
||||
|
||||
type AmazonConfigEntry = ConfigEntry[AmazonDevicesCoordinator]
|
||||
|
||||
@@ -45,7 +45,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
config_entry=entry,
|
||||
update_interval=timedelta(seconds=SCAN_INTERVAL),
|
||||
request_refresh_debouncer=Debouncer(
|
||||
hass, _LOGGER, cooldown=SCAN_INTERVAL, immediate=False
|
||||
hass, _LOGGER, cooldown=30, immediate=False
|
||||
),
|
||||
)
|
||||
self.api = AmazonEchoApi(
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==10.0.0"]
|
||||
"requirements": ["aioamazondevices==9.0.3"]
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@madpilot"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/amberelectric",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["amberelectric"],
|
||||
"requirements": ["amberelectric==2.0.12"]
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@engrbm87"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/android_ip_webcam",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["pydroid-ipcam==3.0.0"]
|
||||
}
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from aiohttp import CookieJar
|
||||
from pyanglianwater import AnglianWater
|
||||
from pyanglianwater.auth import MSOB2CAuth
|
||||
from pyanglianwater.exceptions import (
|
||||
@@ -19,7 +18,7 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import CONF_ACCOUNT_NUMBER, DOMAIN
|
||||
from .coordinator import AnglianWaterConfigEntry, AnglianWaterUpdateCoordinator
|
||||
@@ -34,11 +33,9 @@ async def async_setup_entry(
|
||||
auth = MSOB2CAuth(
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
session=async_create_clientsession(
|
||||
hass,
|
||||
cookie_jar=CookieJar(quote_cookie=False),
|
||||
),
|
||||
session=async_get_clientsession(hass),
|
||||
refresh_token=entry.data[CONF_ACCESS_TOKEN],
|
||||
account_number=entry.data[CONF_ACCOUNT_NUMBER],
|
||||
)
|
||||
try:
|
||||
await auth.send_refresh_request()
|
||||
@@ -48,7 +45,7 @@ async def async_setup_entry(
|
||||
_aw = AnglianWater(authenticator=auth)
|
||||
|
||||
try:
|
||||
await _aw.validate_smart_meter(entry.data[CONF_ACCOUNT_NUMBER])
|
||||
await _aw.validate_smart_meter()
|
||||
except SmartMeterUnavailableError as err:
|
||||
raise ConfigEntryError(
|
||||
translation_domain=DOMAIN, translation_key="smart_meter_unavailable"
|
||||
|
||||
@@ -7,7 +7,7 @@ from typing import Any
|
||||
|
||||
from aiohttp import CookieJar
|
||||
from pyanglianwater import AnglianWater
|
||||
from pyanglianwater.auth import MSOB2CAuth
|
||||
from pyanglianwater.auth import BaseAuth, MSOB2CAuth
|
||||
from pyanglianwater.exceptions import (
|
||||
InvalidAccountIdError,
|
||||
SelfAssertedError,
|
||||
@@ -30,14 +30,11 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
vol.Required(CONF_PASSWORD): selector.TextSelector(
|
||||
selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
|
||||
),
|
||||
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def validate_credentials(
|
||||
auth: MSOB2CAuth, account_number: str
|
||||
) -> str | MSOB2CAuth:
|
||||
async def validate_credentials(auth: MSOB2CAuth) -> str | MSOB2CAuth:
|
||||
"""Validate the provided credentials."""
|
||||
try:
|
||||
await auth.send_login_request()
|
||||
@@ -48,7 +45,7 @@ async def validate_credentials(
|
||||
return "unknown"
|
||||
_aw = AnglianWater(authenticator=auth)
|
||||
try:
|
||||
await _aw.validate_smart_meter(account_number)
|
||||
await _aw.validate_smart_meter()
|
||||
except (InvalidAccountIdError, SmartMeterUnavailableError):
|
||||
return "smart_meter_unavailable"
|
||||
return auth
|
||||
@@ -71,21 +68,35 @@ class AnglianWaterConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self.hass,
|
||||
cookie_jar=CookieJar(quote_cookie=False),
|
||||
),
|
||||
),
|
||||
user_input[CONF_ACCOUNT_NUMBER],
|
||||
account_number=user_input.get(CONF_ACCOUNT_NUMBER),
|
||||
)
|
||||
)
|
||||
if isinstance(validation_response, str):
|
||||
errors["base"] = validation_response
|
||||
else:
|
||||
await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER])
|
||||
if isinstance(validation_response, BaseAuth):
|
||||
account_number = (
|
||||
user_input.get(CONF_ACCOUNT_NUMBER)
|
||||
or validation_response.account_number
|
||||
)
|
||||
await self.async_set_unique_id(account_number)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_ACCOUNT_NUMBER],
|
||||
title=account_number,
|
||||
data={
|
||||
**user_input,
|
||||
CONF_ACCESS_TOKEN: validation_response.refresh_token,
|
||||
CONF_ACCOUNT_NUMBER: account_number,
|
||||
},
|
||||
)
|
||||
if validation_response == "smart_meter_unavailable":
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
|
||||
}
|
||||
),
|
||||
errors={"base": validation_response},
|
||||
)
|
||||
errors["base"] = validation_response
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
|
||||
@@ -12,7 +12,7 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import CONF_ACCOUNT_NUMBER, DOMAIN
|
||||
from .const import DOMAIN
|
||||
|
||||
type AnglianWaterConfigEntry = ConfigEntry[AnglianWaterUpdateCoordinator]
|
||||
|
||||
@@ -44,6 +44,6 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Update data from Anglian Water's API."""
|
||||
try:
|
||||
return await self.api.update(self.config_entry.data[CONF_ACCOUNT_NUMBER])
|
||||
return await self.api.update()
|
||||
except (ExpiredAccessTokenError, UnknownEndpointError) as err:
|
||||
raise UpdateFailed from err
|
||||
|
||||
@@ -18,21 +18,17 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class AnglianWaterEntity(CoordinatorEntity[AnglianWaterUpdateCoordinator]):
|
||||
"""Defines a Anglian Water entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AnglianWaterUpdateCoordinator,
|
||||
smart_meter: SmartMeter,
|
||||
key: str,
|
||||
) -> None:
|
||||
"""Initialize Anglian Water entity."""
|
||||
super().__init__(coordinator)
|
||||
self.smart_meter = smart_meter
|
||||
self._attr_unique_id = f"{smart_meter.serial_number}_{key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, smart_meter.serial_number)},
|
||||
name=smart_meter.serial_number,
|
||||
name="Smart Water Meter",
|
||||
manufacturer="Anglian Water",
|
||||
serial_number=smart_meter.serial_number,
|
||||
)
|
||||
|
||||
@@ -4,9 +4,7 @@
|
||||
"codeowners": ["@pantherale0"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/anglian_water",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pyanglianwater"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyanglianwater==3.1.0"]
|
||||
"requirements": ["pyanglianwater==2.1.0"]
|
||||
}
|
||||
|
||||
@@ -108,8 +108,9 @@ class AnglianWaterSensorEntity(AnglianWaterEntity, SensorEntity):
|
||||
description: AnglianWaterSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize Anglian Water sensor."""
|
||||
super().__init__(coordinator, smart_meter, description.key)
|
||||
super().__init__(coordinator, smart_meter)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{smart_meter.serial_number}_{description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
"data_description": {
|
||||
"account_number": "Your account number found on your latest bill.",
|
||||
"password": "Your password",
|
||||
"username": "Username or email used to log in to the Anglian Water website."
|
||||
"username": "Username or email used to login to the Anglian Water website."
|
||||
},
|
||||
"description": "Enter your Anglian Water account credentials to connect to Home Assistant."
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@Lash-L"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/anova",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["anova_wifi"],
|
||||
"requirements": ["anova-wifi==0.17.0"]
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@hyralex"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/anthemav",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["anthemav"],
|
||||
"requirements": ["anthemav==1.4.1"]
|
||||
|
||||
@@ -17,7 +17,7 @@ from homeassistant.helpers import (
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER
|
||||
from .const import CONF_CHAT_MODEL, DEFAULT, DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER
|
||||
|
||||
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
@@ -37,7 +37,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
|
||||
partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
|
||||
)
|
||||
try:
|
||||
await client.models.list(timeout=10.0)
|
||||
# Use model from first conversation subentry for validation
|
||||
subentries = list(entry.subentries.values())
|
||||
if subentries:
|
||||
model_id = subentries[0].data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
|
||||
else:
|
||||
model_id = DEFAULT[CONF_CHAT_MODEL]
|
||||
model = await client.models.retrieve(model_id=model_id, timeout=10.0)
|
||||
LOGGER.debug("Anthropic model: %s", model.display_name)
|
||||
except anthropic.AuthenticationError as err:
|
||||
LOGGER.error("Invalid API key: %s", err)
|
||||
return False
|
||||
|
||||
@@ -421,8 +421,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
)
|
||||
if short_form.search(model_alias):
|
||||
model_alias += "-0"
|
||||
if model_alias.endswith(("haiku", "opus", "sonnet")):
|
||||
model_alias += "-latest"
|
||||
model_options.append(
|
||||
SelectOptionDict(
|
||||
label=model_info.display_name,
|
||||
|
||||
@@ -583,7 +583,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
identifiers={(DOMAIN, subentry.subentry_id)},
|
||||
name=subentry.title,
|
||||
manufacturer="Anthropic",
|
||||
model=subentry.data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL]),
|
||||
model="Claude",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/anthropic",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["anthropic==0.75.0"]
|
||||
"requirements": ["anthropic==0.73.0"]
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@bdr99"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/aosmith",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["py-aosmith==1.0.15"]
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@yuxincs"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/apcupsd",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["apcaccess"],
|
||||
"quality_scale": "platinum",
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["zeroconf"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/apple_tv",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyatv", "srptools"],
|
||||
"requirements": ["pyatv==0.16.1;python_version<'3.14'"],
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@elupus"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/arcam_fmj",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["arcam"],
|
||||
"requirements": ["arcam-fmj==1.8.2"],
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@ikalnyi"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/arve",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["asyncarve==0.1.1"]
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@milanmeu"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/aseko_pool_live",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioaseko"],
|
||||
"requirements": ["aioaseko==1.0.0"]
|
||||
|
||||
@@ -3,9 +3,8 @@
|
||||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
import math
|
||||
|
||||
from pysilero_vad import SileroVoiceActivityDetector
|
||||
from pymicro_vad import MicroVad
|
||||
from pyspeex_noise import AudioProcessor
|
||||
|
||||
from .const import BYTES_PER_CHUNK
|
||||
@@ -43,8 +42,8 @@ class AudioEnhancer(ABC):
|
||||
"""Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples."""
|
||||
|
||||
|
||||
class SileroVadSpeexEnhancer(AudioEnhancer):
|
||||
"""Audio enhancer that runs Silero VAD and speex."""
|
||||
class MicroVadSpeexEnhancer(AudioEnhancer):
|
||||
"""Audio enhancer that runs microVAD and speex."""
|
||||
|
||||
def __init__(
|
||||
self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool
|
||||
@@ -70,49 +69,21 @@ class SileroVadSpeexEnhancer(AudioEnhancer):
|
||||
self.noise_suppression,
|
||||
)
|
||||
|
||||
self.vad: SileroVoiceActivityDetector | None = None
|
||||
|
||||
# We get 10ms chunks but Silero works on 32ms chunks, so we have to
|
||||
# buffer audio. The previous speech probability is used until enough
|
||||
# audio has been buffered.
|
||||
self._vad_buffer: bytearray | None = None
|
||||
self._vad_buffer_chunks = 0
|
||||
self._vad_buffer_chunk_idx = 0
|
||||
self._last_speech_probability: float | None = None
|
||||
self.vad: MicroVad | None = None
|
||||
|
||||
if self.is_vad_enabled:
|
||||
self.vad = SileroVoiceActivityDetector()
|
||||
|
||||
# VAD buffer is a multiple of 10ms, but Silero VAD needs 32ms.
|
||||
self._vad_buffer_chunks = int(
|
||||
math.ceil(self.vad.chunk_bytes() / BYTES_PER_CHUNK)
|
||||
)
|
||||
self._vad_leftover_bytes = self.vad.chunk_bytes() - BYTES_PER_CHUNK
|
||||
self._vad_buffer = bytearray(self.vad.chunk_bytes())
|
||||
_LOGGER.debug("Initialized Silero VAD")
|
||||
self.vad = MicroVad()
|
||||
_LOGGER.debug("Initialized microVAD")
|
||||
|
||||
def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk:
|
||||
"""Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples."""
|
||||
speech_probability: float | None = None
|
||||
|
||||
assert len(audio) == BYTES_PER_CHUNK
|
||||
|
||||
if self.vad is not None:
|
||||
# Run VAD
|
||||
assert self._vad_buffer is not None
|
||||
start_idx = self._vad_buffer_chunk_idx * BYTES_PER_CHUNK
|
||||
self._vad_buffer[start_idx : start_idx + BYTES_PER_CHUNK] = audio
|
||||
|
||||
self._vad_buffer_chunk_idx += 1
|
||||
if self._vad_buffer_chunk_idx >= self._vad_buffer_chunks:
|
||||
# We have enough data to run Silero VAD (32 ms)
|
||||
self._last_speech_probability = self.vad.process_chunk(
|
||||
self._vad_buffer[: self.vad.chunk_bytes()]
|
||||
)
|
||||
|
||||
# Copy leftover audio that wasn't processed to start
|
||||
self._vad_buffer[: self._vad_leftover_bytes] = self._vad_buffer[
|
||||
-self._vad_leftover_bytes :
|
||||
]
|
||||
self._vad_buffer_chunk_idx = 0
|
||||
speech_probability = self.vad.Process10ms(audio)
|
||||
|
||||
if self.audio_processor is not None:
|
||||
# Run noise suppression and auto gain
|
||||
@@ -121,5 +92,5 @@ class SileroVadSpeexEnhancer(AudioEnhancer):
|
||||
return EnhancedAudioChunk(
|
||||
audio=audio,
|
||||
timestamp_ms=timestamp_ms,
|
||||
speech_probability=self._last_speech_probability,
|
||||
speech_probability=speech_probability,
|
||||
)
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "local_push",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["pysilero-vad==3.0.1", "pyspeex-noise==1.0.2"]
|
||||
"requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.2"]
|
||||
}
|
||||
|
||||
@@ -55,7 +55,7 @@ from homeassistant.util import (
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.limited_size_dict import LimitedSizeDict
|
||||
|
||||
from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, SileroVadSpeexEnhancer
|
||||
from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
|
||||
from .const import (
|
||||
ACKNOWLEDGE_PATH,
|
||||
BYTES_PER_CHUNK,
|
||||
@@ -633,7 +633,7 @@ class PipelineRun:
|
||||
# Initialize with audio settings
|
||||
if self.audio_settings.needs_processor and (self.audio_enhancer is None):
|
||||
# Default audio enhancer
|
||||
self.audio_enhancer = SileroVadSpeexEnhancer(
|
||||
self.audio_enhancer = MicroVadSpeexEnhancer(
|
||||
self.audio_settings.auto_gain_dbfs,
|
||||
self.audio_settings.noise_suppression_level,
|
||||
self.audio_settings.is_vad_enabled,
|
||||
@@ -1123,6 +1123,63 @@ class PipelineRun:
|
||||
)
|
||||
|
||||
try:
|
||||
user_input = conversation.ConversationInput(
|
||||
text=intent_input,
|
||||
context=self.context,
|
||||
conversation_id=conversation_id,
|
||||
device_id=self._device_id,
|
||||
satellite_id=self._satellite_id,
|
||||
language=input_language,
|
||||
agent_id=self.intent_agent.id,
|
||||
extra_system_prompt=conversation_extra_system_prompt,
|
||||
)
|
||||
|
||||
agent_id = self.intent_agent.id
|
||||
processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
|
||||
all_targets_in_satellite_area = False
|
||||
intent_response: intent.IntentResponse | None = None
|
||||
if not processed_locally and not self._intent_agent_only:
|
||||
# Sentence triggers override conversation agent
|
||||
if (
|
||||
trigger_response_text
|
||||
:= await conversation.async_handle_sentence_triggers(
|
||||
self.hass, user_input
|
||||
)
|
||||
) is not None:
|
||||
# Sentence trigger matched
|
||||
agent_id = "sentence_trigger"
|
||||
processed_locally = True
|
||||
intent_response = intent.IntentResponse(
|
||||
self.pipeline.conversation_language
|
||||
)
|
||||
intent_response.async_set_speech(trigger_response_text)
|
||||
|
||||
intent_filter: Callable[[RecognizeResult], bool] | None = None
|
||||
# If the LLM has API access, we filter out some sentences that are
|
||||
# interfering with LLM operation.
|
||||
if (
|
||||
intent_agent_state := self.hass.states.get(self.intent_agent.id)
|
||||
) and intent_agent_state.attributes.get(
|
||||
ATTR_SUPPORTED_FEATURES, 0
|
||||
) & conversation.ConversationEntityFeature.CONTROL:
|
||||
intent_filter = _async_local_fallback_intent_filter
|
||||
|
||||
# Try local intents
|
||||
if (
|
||||
intent_response is None
|
||||
and self.pipeline.prefer_local_intents
|
||||
and (
|
||||
intent_response := await conversation.async_handle_intents(
|
||||
self.hass,
|
||||
user_input,
|
||||
intent_filter=intent_filter,
|
||||
)
|
||||
)
|
||||
):
|
||||
# Local intent matched
|
||||
agent_id = conversation.HOME_ASSISTANT_AGENT
|
||||
processed_locally = True
|
||||
|
||||
if self.tts_stream and self.tts_stream.supports_streaming_input:
|
||||
tts_input_stream: asyncio.Queue[str | None] | None = asyncio.Queue()
|
||||
else:
|
||||
@@ -1208,17 +1265,6 @@ class PipelineRun:
|
||||
assert self.tts_stream is not None
|
||||
self.tts_stream.async_set_message_stream(tts_input_stream_generator())
|
||||
|
||||
user_input = conversation.ConversationInput(
|
||||
text=intent_input,
|
||||
context=self.context,
|
||||
conversation_id=conversation_id,
|
||||
device_id=self._device_id,
|
||||
satellite_id=self._satellite_id,
|
||||
language=input_language,
|
||||
agent_id=self.intent_agent.id,
|
||||
extra_system_prompt=conversation_extra_system_prompt,
|
||||
)
|
||||
|
||||
with (
|
||||
chat_session.async_get_chat_session(
|
||||
self.hass, user_input.conversation_id
|
||||
@@ -1230,53 +1276,6 @@ class PipelineRun:
|
||||
chat_log_delta_listener=chat_log_delta_listener,
|
||||
) as chat_log,
|
||||
):
|
||||
agent_id = self.intent_agent.id
|
||||
processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
|
||||
all_targets_in_satellite_area = False
|
||||
intent_response: intent.IntentResponse | None = None
|
||||
if not processed_locally and not self._intent_agent_only:
|
||||
# Sentence triggers override conversation agent
|
||||
if (
|
||||
trigger_response_text
|
||||
:= await conversation.async_handle_sentence_triggers(
|
||||
self.hass, user_input, chat_log
|
||||
)
|
||||
) is not None:
|
||||
# Sentence trigger matched
|
||||
agent_id = "sentence_trigger"
|
||||
processed_locally = True
|
||||
intent_response = intent.IntentResponse(
|
||||
self.pipeline.conversation_language
|
||||
)
|
||||
intent_response.async_set_speech(trigger_response_text)
|
||||
|
||||
intent_filter: Callable[[RecognizeResult], bool] | None = None
|
||||
# If the LLM has API access, we filter out some sentences that are
|
||||
# interfering with LLM operation.
|
||||
if (
|
||||
intent_agent_state := self.hass.states.get(self.intent_agent.id)
|
||||
) and intent_agent_state.attributes.get(
|
||||
ATTR_SUPPORTED_FEATURES, 0
|
||||
) & conversation.ConversationEntityFeature.CONTROL:
|
||||
intent_filter = _async_local_fallback_intent_filter
|
||||
|
||||
# Try local intents
|
||||
if (
|
||||
intent_response is None
|
||||
and self.pipeline.prefer_local_intents
|
||||
and (
|
||||
intent_response := await conversation.async_handle_intents(
|
||||
self.hass,
|
||||
user_input,
|
||||
chat_log,
|
||||
intent_filter=intent_filter,
|
||||
)
|
||||
)
|
||||
):
|
||||
# Local intent matched
|
||||
agent_id = conversation.HOME_ASSISTANT_AGENT
|
||||
processed_locally = True
|
||||
|
||||
# It was already handled, create response and add to chat history
|
||||
if intent_response is not None:
|
||||
speech: str = intent_response.speech.get("plain", {}).get(
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/assist_satellite",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==3.5.0"]
|
||||
"requirements": ["hassil==3.4.0"]
|
||||
}
|
||||
|
||||
@@ -112,44 +112,48 @@
  "title": "Assist satellite",
  "triggers": {
    "idle": {
      "description": "Triggers after one or more voice assistant satellites become idle after having processed a command.",
      "description": "Triggers when an Assist satellite becomes idle.",
      "description_configured": "[%key:component::assist_satellite::triggers::idle::description%]",
      "fields": {
        "behavior": {
          "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
        }
      },
      "name": "Satellite became idle"
      "name": "When an Assist satellite becomes idle"
    },
    "listening": {
      "description": "Triggers after one or more voice assistant satellites start listening for a command from someone.",
      "description": "Triggers when an Assist satellite starts listening.",
      "description_configured": "[%key:component::assist_satellite::triggers::listening::description%]",
      "fields": {
        "behavior": {
          "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
        }
      },
      "name": "Satellite started listening"
      "name": "When an Assist satellite starts listening"
    },
    "processing": {
      "description": "Triggers after one or more voice assistant satellites start processing a command after having heard it.",
      "description": "Triggers when an Assist satellite is processing.",
      "description_configured": "[%key:component::assist_satellite::triggers::processing::description%]",
      "fields": {
        "behavior": {
          "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
        }
      },
      "name": "Satellite started processing"
      "name": "When an Assist satellite is processing"
    },
    "responding": {
      "description": "Triggers after one or more voice assistant satellites start responding to a command after having processed it, or start announcing something.",
      "description": "Triggers when an Assist satellite is responding.",
      "description_configured": "[%key:component::assist_satellite::triggers::responding::description%]",
      "fields": {
        "behavior": {
          "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
        }
      },
      "name": "Satellite started responding"
      "name": "When an Assist satellite is responding"
    }
  }
}
@@ -1,22 +1,16 @@
"""Provides triggers for assist satellites."""

from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_target_state_trigger
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger

from .const import DOMAIN
from .entity import AssistSatelliteState

TRIGGERS: dict[str, type[Trigger]] = {
    "idle": make_entity_target_state_trigger(DOMAIN, AssistSatelliteState.IDLE),
    "listening": make_entity_target_state_trigger(
        DOMAIN, AssistSatelliteState.LISTENING
    ),
    "processing": make_entity_target_state_trigger(
        DOMAIN, AssistSatelliteState.PROCESSING
    ),
    "responding": make_entity_target_state_trigger(
        DOMAIN, AssistSatelliteState.RESPONDING
    ),
    "idle": make_entity_state_trigger(DOMAIN, AssistSatelliteState.IDLE),
    "listening": make_entity_state_trigger(DOMAIN, AssistSatelliteState.LISTENING),
    "processing": make_entity_state_trigger(DOMAIN, AssistSatelliteState.PROCESSING),
    "responding": make_entity_state_trigger(DOMAIN, AssistSatelliteState.RESPONDING),
}
@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
  "requirements": ["aioasuswrt==1.5.4", "asusrouter==1.21.3"]
  "requirements": ["aioasuswrt==1.5.1", "asusrouter==1.21.0"]
}

@@ -4,7 +4,6 @@
  "codeowners": ["@MatsNL"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/atag",
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["pyatag"],
  "requirements": ["pyatag==0.3.5.3"]

@@ -27,8 +27,7 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/august",
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "loggers": ["pubnub", "yalexs"],
  "requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.2"]
  "requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.1"]
}

@@ -4,7 +4,6 @@
  "codeowners": ["@djtimca"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/aurora",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["auroranoaa"],
  "requirements": ["auroranoaa==0.0.5"]

@@ -4,7 +4,6 @@
  "codeowners": ["@nickw444", "@Bre77"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/aussie_broadband",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["aussiebb"],
  "requirements": ["pyaussiebb==0.1.5"]

@@ -4,8 +4,6 @@
  "codeowners": ["@klaasnicolaas"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/autarco",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "quality_scale": "silver",
  "requirements": ["autarco==3.2.0"]
}
@@ -6,7 +6,10 @@ rules:
      This integration does not provide additional actions.
  appropriate-polling: done
  brands: done
  common-modules: done
  common-modules:
    status: todo
    comment: |
      The entity.py file is not used in this integration.
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
@@ -204,25 +204,13 @@ async def async_setup_entry(
    async_add_entities(entities)


class AutarcoSensorBase(CoordinatorEntity[AutarcoDataUpdateCoordinator], SensorEntity):
    """Base class for Autarco sensors."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: AutarcoDataUpdateCoordinator,
        description: SensorEntityDescription,
    ) -> None:
        """Initialize Autarco sensor base."""
        super().__init__(coordinator)
        self.entity_description = description


class AutarcoBatterySensorEntity(AutarcoSensorBase):
class AutarcoBatterySensorEntity(
    CoordinatorEntity[AutarcoDataUpdateCoordinator], SensorEntity
):
    """Defines an Autarco battery sensor."""

    entity_description: AutarcoBatterySensorEntityDescription
    _attr_has_entity_name = True

    def __init__(
        self,
@@ -230,8 +218,10 @@ class AutarcoBatterySensorEntity(AutarcoSensorBase):
        coordinator: AutarcoDataUpdateCoordinator,
        description: AutarcoBatterySensorEntityDescription,
    ) -> None:
        """Initialize Autarco battery sensor."""
        super().__init__(coordinator, description)
        """Initialize Autarco sensor."""
        super().__init__(coordinator)

        self.entity_description = description
        self._attr_unique_id = (
            f"{coordinator.account_site.site_id}_battery_{description.key}"
        )
@@ -249,10 +239,13 @@ class AutarcoBatterySensorEntity(AutarcoSensorBase):
        return self.entity_description.value_fn(self.coordinator.data.battery)


class AutarcoSolarSensorEntity(AutarcoSensorBase):
class AutarcoSolarSensorEntity(
    CoordinatorEntity[AutarcoDataUpdateCoordinator], SensorEntity
):
    """Defines an Autarco solar sensor."""

    entity_description: AutarcoSolarSensorEntityDescription
    _attr_has_entity_name = True

    def __init__(
        self,
@@ -260,8 +253,10 @@ class AutarcoSolarSensorEntity(AutarcoSensorBase):
        coordinator: AutarcoDataUpdateCoordinator,
        description: AutarcoSolarSensorEntityDescription,
    ) -> None:
        """Initialize Autarco solar sensor."""
        super().__init__(coordinator, description)
        """Initialize Autarco sensor."""
        super().__init__(coordinator)

        self.entity_description = description
        self._attr_unique_id = (
            f"{coordinator.account_site.site_id}_solar_{description.key}"
        )
@@ -278,10 +273,13 @@ class AutarcoSolarSensorEntity(AutarcoSensorBase):
        return self.entity_description.value_fn(self.coordinator.data.solar)


class AutarcoInverterSensorEntity(AutarcoSensorBase):
class AutarcoInverterSensorEntity(
    CoordinatorEntity[AutarcoDataUpdateCoordinator], SensorEntity
):
    """Defines an Autarco inverter sensor."""

    entity_description: AutarcoInverterSensorEntityDescription
    _attr_has_entity_name = True

    def __init__(
        self,
@@ -290,8 +288,10 @@ class AutarcoInverterSensorEntity(AutarcoSensorBase):
        description: AutarcoInverterSensorEntityDescription,
        serial_number: str,
    ) -> None:
        """Initialize Autarco inverter sensor."""
        super().__init__(coordinator, description)
        """Initialize Autarco sensor."""
        super().__init__(coordinator)

        self.entity_description = description
        self._serial_number = serial_number
        self._attr_unique_id = f"{serial_number}_{description.key}"
        self._attr_device_info = DeviceInfo(
@@ -12,9 +12,8 @@ from typing import Any, Protocol, cast
from propcache.api import cached_property
import voluptuous as vol

from homeassistant.components import labs, websocket_api
from homeassistant.components import websocket_api
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
from homeassistant.components.labs import async_listen as async_labs_listen
from homeassistant.const import (
    ATTR_ENTITY_ID,
    ATTR_MODE,
@@ -115,57 +114,6 @@ ATTR_SOURCE = "source"
ATTR_VARIABLES = "variables"
SERVICE_TRIGGER = "trigger"

NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG = "new_triggers_conditions"

_EXPERIMENTAL_CONDITION_PLATFORMS = {
    "light",
}

_EXPERIMENTAL_TRIGGER_PLATFORMS = {
    "alarm_control_panel",
    "assist_satellite",
    "binary_sensor",
    "button",
    "climate",
    "cover",
    "device_tracker",
    "fan",
    "lawn_mower",
    "light",
    "media_player",
    "schedule",
    "switch",
    "text",
    "update",
    "vacuum",
}


@callback
def is_disabled_experimental_condition(hass: HomeAssistant, platform: str) -> bool:
    """Check if the platform is a disabled experimental condition platform."""
    return (
        platform in _EXPERIMENTAL_CONDITION_PLATFORMS
        and not labs.async_is_preview_feature_enabled(
            hass,
            DOMAIN,
            NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
        )
    )


@callback
def is_disabled_experimental_trigger(hass: HomeAssistant, platform: str) -> bool:
    """Check if the platform is a disabled experimental trigger platform."""
    return (
        platform in _EXPERIMENTAL_TRIGGER_PLATFORMS
        and not labs.async_is_preview_feature_enabled(
            hass,
            DOMAIN,
            NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
        )
    )


class IfAction(Protocol):
    """Define the format of if_action."""
@@ -369,20 +317,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
        schema=vol.Schema({vol.Optional(CONF_ID): str}),
    )

    @callback
    def new_triggers_conditions_listener() -> None:
        """Handle new_triggers_conditions flag change."""
        hass.async_create_task(
            reload_helper.execute_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
        )

    async_labs_listen(
        hass,
        DOMAIN,
        NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
        new_triggers_conditions_listener,
    )

    websocket_api.async_register_command(hass, websocket_config)

    return True
@@ -8,8 +8,6 @@
  "integration_type": "system",
  "preview_features": {
    "new_triggers_conditions": {
      "feedback_url": "https://forms.gle/fWFZqf5MzuwWTsCH8",
      "learn_more_url": "https://www.home-assistant.io/blog/2025/12/03/release-202512/#purpose-specific-triggers-and-conditions",
      "report_issue_url": "https://github.com/home-assistant/core/issues/new?template=bug_report.yml&integration_link=https://www.home-assistant.io/integrations/automation&integration_name=Automation"
    }
  },
@@ -69,10 +69,10 @@
  },
  "preview_features": {
    "new_triggers_conditions": {
      "description": "Enables new purpose-specific triggers and conditions that are more user-friendly than technical state-based options.\n\nThese new automation features support targets across your entire home, letting you trigger automations for any entity, device, area, floor, or label (for example, when any light in your living room turned on). Integrations can now also provide their own purpose-specific triggers and conditions, just like actions.\n\nThis preview also includes a new tree view to help you navigate your home when adding triggers, conditions, and actions.",
      "disable_confirmation": "Disabling this preview will cause automations and scripts that use the new purpose-specific triggers and conditions to fail.\n\nBefore disabling, ensure that your automations or scripts do not rely on this feature.",
      "enable_confirmation": "This feature is still in development and may change. These new purpose-specific triggers and conditions are being refined based on user feedback and are not yet complete.\n\nBy enabling this preview, you'll have early access to these new capabilities, but be aware that they may be modified or updated in future releases.",
      "name": "Purpose-specific triggers and conditions"
      "description": "Enables new intuitive triggers and conditions that are more user-friendly than technical state-based options.\n\nThese new automation features support targets across your entire home, letting you trigger automations for any entity, device, area, floor, or label (for example, when any light in your living room turned on). Integrations can now also provide their own intuitive triggers and conditions, just like actions.\n\nThis preview also includes a new tree view to help you navigate your home when adding triggers, conditions, and actions.",
      "disable_confirmation": "Disabling this preview will cause automations and scripts that use the new intuitive triggers and conditions to fail.\n\nBefore disabling, ensure that your automations or scripts do not rely on this feature.",
      "enable_confirmation": "This feature is still in development and may change. These new intuitive triggers and conditions are being refined based on user feedback and are not yet complete.\n\nBy enabling this preview, you'll have early access to these new capabilities, but be aware that they may be modified or updated in future releases.",
      "name": "Intuitive triggers and conditions"
    }
  },
  "services": {
@@ -4,7 +4,6 @@
  "codeowners": ["@kaareseras"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/azure_data_explorer",
  "integration_type": "service",
  "iot_class": "cloud_push",
  "loggers": ["azure"],
  "requirements": ["azure-kusto-ingest==4.5.1", "azure-kusto-data[aio]==4.5.1"]

@@ -4,7 +4,6 @@
  "codeowners": ["@timmo001"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/azure_devops",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["aioazuredevops"],
  "requirements": ["aioazuredevops==2.2.2"]

@@ -4,7 +4,6 @@
  "codeowners": ["@eavanvalkenburg"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/azure_event_hub",
  "integration_type": "service",
  "iot_class": "cloud_push",
  "loggers": ["azure"],
  "requirements": ["azure-eventhub==5.11.1"],

@@ -4,7 +4,6 @@
  "codeowners": ["@bdraco", "@jfroy"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/baf",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["aiobafi6==0.9.0"],
  "zeroconf": [

@@ -12,7 +12,6 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/balboa",
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["pybalboa"],
  "requirements": ["pybalboa==1.1.3"]
@@ -21,29 +21,29 @@ from homeassistant.helpers import device_registry as dr
from homeassistant.util.ssl import get_default_context

from .const import DOMAIN
from .websocket import BeoWebsocket
from .websocket import BangOlufsenWebsocket


@dataclass
class BeoData:
class BangOlufsenData:
    """Dataclass for API client and WebSocket client."""

    websocket: BeoWebsocket
    websocket: BangOlufsenWebsocket
    client: MozartClient


type BeoConfigEntry = ConfigEntry[BeoData]
type BangOlufsenConfigEntry = ConfigEntry[BangOlufsenData]

PLATFORMS = [Platform.EVENT, Platform.MEDIA_PLAYER]


async def async_setup_entry(hass: HomeAssistant, entry: BeoConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry) -> bool:
    """Set up from a config entry."""

    # Remove casts to str
    assert entry.unique_id

    # Create device now as BeoWebsocket needs a device for debug logging, firing events etc.
    # Create device now as BangOlufsenWebsocket needs a device for debug logging, firing events etc.
    device_registry = dr.async_get(hass)
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
@@ -68,10 +68,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: BeoConfigEntry) -> bool:
        await client.close_api_client()
        raise ConfigEntryNotReady(f"Unable to connect to {entry.title}") from error

    websocket = BeoWebsocket(hass, entry, client)
    websocket = BangOlufsenWebsocket(hass, entry, client)

    # Add the websocket and API client
    entry.runtime_data = BeoData(websocket, client)
    entry.runtime_data = BangOlufsenData(websocket, client)

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

@@ -82,7 +82,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: BeoConfigEntry) -> bool:
    return True


async def async_unload_entry(hass: HomeAssistant, entry: BeoConfigEntry) -> bool:
async def async_unload_entry(
    hass: HomeAssistant, entry: BangOlufsenConfigEntry
) -> bool:
    """Unload a config entry."""
    # Close the API client and WebSocket notification listener
    entry.runtime_data.client.disconnect_notifications()
Some files were not shown because too many files have changed in this diff.