mirror of
https://github.com/home-assistant/core.git
synced 2025-12-20 23:08:12 +00:00
Compare commits
12 Commits
schedule/a
...
aioshelly_
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
29b3712836 | ||
|
|
9099cae4db | ||
|
|
a53036ca2c | ||
|
|
46ba228d4f | ||
|
|
e36a62b0d6 | ||
|
|
adc55258c6 | ||
|
|
c330bebf4c | ||
|
|
d52152003b | ||
|
|
be19fef6dd | ||
|
|
2b35b7fc65 | ||
|
|
643c1a2259 | ||
|
|
963ebfaf3b |
@@ -13,7 +13,6 @@ core: &core
|
||||
|
||||
# Our base platforms, that are used by other integrations
|
||||
base_platforms: &base_platforms
|
||||
- homeassistant/components/ai_task/**
|
||||
- homeassistant/components/air_quality/**
|
||||
- homeassistant/components/alarm_control_panel/**
|
||||
- homeassistant/components/assist_satellite/**
|
||||
|
||||
@@ -27,6 +27,7 @@
|
||||
"charliermarsh.ruff",
|
||||
"ms-python.pylint",
|
||||
"ms-python.vscode-pylance",
|
||||
"visualstudioexptteam.vscodeintellicode",
|
||||
"redhat.vscode-yaml",
|
||||
"esbenp.prettier-vscode",
|
||||
"GitHub.vscode-pull-request-github",
|
||||
|
||||
256
.github/workflows/builder.yml
vendored
256
.github/workflows/builder.yml
vendored
@@ -14,9 +14,6 @@ env:
|
||||
PIP_TIMEOUT: 60
|
||||
UV_HTTP_TIMEOUT: 60
|
||||
UV_SYSTEM_PYTHON: "true"
|
||||
# Base image version from https://github.com/home-assistant/docker
|
||||
BASE_IMAGE_VERSION: "2025.12.0"
|
||||
ARCHITECTURES: '["amd64", "aarch64"]'
|
||||
|
||||
jobs:
|
||||
init:
|
||||
@@ -24,16 +21,18 @@ jobs:
|
||||
if: github.repository_owner == 'home-assistant'
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
architectures: ${{ steps.info.outputs.architectures }}
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
channel: ${{ steps.version.outputs.channel }}
|
||||
publish: ${{ steps.version.outputs.publish }}
|
||||
architectures: ${{ env.ARCHITECTURES }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
@@ -70,7 +69,7 @@ jobs:
|
||||
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
|
||||
|
||||
- name: Upload translations
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
||||
with:
|
||||
name: translations
|
||||
path: translations.tar.gz
|
||||
@@ -80,7 +79,7 @@ jobs:
|
||||
name: Build ${{ matrix.arch }} base core image
|
||||
if: github.repository_owner == 'home-assistant'
|
||||
needs: init
|
||||
runs-on: ${{ matrix.os }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
@@ -89,14 +88,9 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
include:
|
||||
- arch: amd64
|
||||
os: ubuntu-latest
|
||||
- arch: aarch64
|
||||
os: ubuntu-24.04-arm
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Download nightly wheels of frontend
|
||||
if: needs.init.outputs.channel == 'dev'
|
||||
@@ -122,7 +116,7 @@ jobs:
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
if: needs.init.outputs.channel == 'dev'
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
@@ -169,7 +163,7 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Download translations
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
|
||||
with:
|
||||
name: translations
|
||||
|
||||
@@ -190,60 +184,16 @@ jobs:
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- &install_cosign
|
||||
name: Install Cosign
|
||||
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
|
||||
with:
|
||||
cosign-release: "v2.5.3"
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build variables
|
||||
id: vars
|
||||
shell: bash
|
||||
run: |
|
||||
echo "base_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant-base:${{ env.BASE_IMAGE_VERSION }}" >> "$GITHUB_OUTPUT"
|
||||
echo "cache_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:latest" >> "$GITHUB_OUTPUT"
|
||||
echo "created=$(date --rfc-3339=seconds --utc)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Verify base image signature
|
||||
run: |
|
||||
cosign verify \
|
||||
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
|
||||
--certificate-identity-regexp "https://github.com/home-assistant/docker/.*" \
|
||||
"${{ steps.vars.outputs.base_image }}"
|
||||
|
||||
- name: Verify cache image signature
|
||||
id: cache
|
||||
continue-on-error: true
|
||||
run: |
|
||||
cosign verify \
|
||||
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
|
||||
--certificate-identity-regexp "https://github.com/home-assistant/core/.*" \
|
||||
"${{ steps.vars.outputs.cache_image }}"
|
||||
|
||||
# home-assistant/builder doesn't support sha pinning
|
||||
- name: Build base image
|
||||
id: build
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
uses: home-assistant/builder@2025.09.0
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
platforms: ${{ steps.vars.outputs.platform }}
|
||||
push: true
|
||||
cache-from: ${{ steps.cache.outcome == 'success' && steps.vars.outputs.cache_image || '' }}
|
||||
build-args: |
|
||||
BUILD_FROM=${{ steps.vars.outputs.base_image }}
|
||||
tags: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}
|
||||
labels: |
|
||||
io.hass.arch=${{ matrix.arch }}
|
||||
io.hass.version=${{ needs.init.outputs.version }}
|
||||
org.opencontainers.image.created=${{ steps.vars.outputs.created }}
|
||||
org.opencontainers.image.version=${{ needs.init.outputs.version }}
|
||||
|
||||
- name: Sign image
|
||||
run: |
|
||||
cosign sign --yes "ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}@${{ steps.build.outputs.digest }}"
|
||||
args: |
|
||||
$BUILD_ARGS \
|
||||
--${{ matrix.arch }} \
|
||||
--cosign \
|
||||
--target /data \
|
||||
--generic ${{ needs.init.outputs.version }}
|
||||
|
||||
build_machine:
|
||||
name: Build ${{ matrix.machine }} machine core image
|
||||
@@ -273,7 +223,7 @@ jobs:
|
||||
- green
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Set build additional args
|
||||
run: |
|
||||
@@ -295,7 +245,7 @@ jobs:
|
||||
|
||||
# home-assistant/builder doesn't support sha pinning
|
||||
- name: Build base image
|
||||
uses: home-assistant/builder@2025.11.0
|
||||
uses: home-assistant/builder@2025.09.0
|
||||
with:
|
||||
args: |
|
||||
$BUILD_ARGS \
|
||||
@@ -311,7 +261,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Initialize git
|
||||
uses: home-assistant/actions/helpers/git-init@master
|
||||
@@ -354,7 +304,13 @@ jobs:
|
||||
matrix:
|
||||
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
|
||||
steps:
|
||||
- *install_cosign
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Install Cosign
|
||||
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
|
||||
with:
|
||||
cosign-release: "v2.2.3"
|
||||
|
||||
- name: Login to DockerHub
|
||||
if: matrix.registry == 'docker.io/homeassistant'
|
||||
@@ -364,104 +320,88 @@ jobs:
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: matrix.registry == 'ghcr.io/home-assistant'
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Verify architecture image signatures
|
||||
- name: Build Meta Image
|
||||
shell: bash
|
||||
run: |
|
||||
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
|
||||
for arch in $ARCHS; do
|
||||
echo "Verifying ${arch} image signature..."
|
||||
cosign verify \
|
||||
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
|
||||
--certificate-identity-regexp https://github.com/home-assistant/core/.* \
|
||||
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
|
||||
done
|
||||
echo "✓ All images verified successfully"
|
||||
export DOCKER_CLI_EXPERIMENTAL=enabled
|
||||
|
||||
# Generate all Docker tags based on version string
|
||||
# Version format: YYYY.MM.PATCH, YYYY.MM.PATCHbN (beta), or YYYY.MM.PATCH.devYYYYMMDDHHMM (dev)
|
||||
# Examples:
|
||||
# 2025.12.1 (stable) -> tags: 2025.12.1, 2025.12, stable, latest, beta, rc
|
||||
# 2025.12.0b3 (beta) -> tags: 2025.12.0b3, beta, rc
|
||||
# 2025.12.0.dev202511250240 -> tags: 2025.12.0.dev202511250240, dev
|
||||
- name: Generate Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
|
||||
with:
|
||||
images: ${{ matrix.registry }}/home-assistant
|
||||
sep-tags: ","
|
||||
tags: |
|
||||
type=raw,value=${{ needs.init.outputs.version }},priority=9999
|
||||
type=raw,value=dev,enable=${{ contains(needs.init.outputs.version, 'd') }}
|
||||
type=raw,value=beta,enable=${{ !contains(needs.init.outputs.version, 'd') }}
|
||||
type=raw,value=rc,enable=${{ !contains(needs.init.outputs.version, 'd') }}
|
||||
type=raw,value=stable,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
|
||||
type=raw,value=latest,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
|
||||
type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
|
||||
function create_manifest() {
|
||||
local tag_l=${1}
|
||||
local tag_r=${2}
|
||||
local registry=${{ matrix.registry }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.7.1
|
||||
docker manifest create "${registry}/home-assistant:${tag_l}" \
|
||||
"${registry}/amd64-homeassistant:${tag_r}" \
|
||||
"${registry}/aarch64-homeassistant:${tag_r}"
|
||||
|
||||
- name: Copy architecture images to DockerHub
|
||||
if: matrix.registry == 'docker.io/homeassistant'
|
||||
shell: bash
|
||||
run: |
|
||||
# Use imagetools to copy image blobs directly between registries
|
||||
# This preserves provenance/attestations and seems to be much faster than pull/push
|
||||
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
|
||||
for arch in $ARCHS; do
|
||||
echo "Copying ${arch} image to DockerHub..."
|
||||
for attempt in 1 2 3; do
|
||||
if docker buildx imagetools create \
|
||||
--tag "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}" \
|
||||
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"; then
|
||||
break
|
||||
fi
|
||||
echo "Attempt ${attempt} failed, retrying in 10 seconds..."
|
||||
sleep 10
|
||||
if [ "${attempt}" -eq 3 ]; then
|
||||
echo "Failed after 3 attempts"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
|
||||
done
|
||||
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
|
||||
"${registry}/amd64-homeassistant:${tag_r}" \
|
||||
--os linux --arch amd64
|
||||
|
||||
- name: Create and push multi-arch manifests
|
||||
shell: bash
|
||||
run: |
|
||||
# Build list of architecture images dynamically
|
||||
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
|
||||
ARCH_IMAGES=()
|
||||
for arch in $ARCHS; do
|
||||
ARCH_IMAGES+=("${{ matrix.registry }}/${arch}-homeassistant:${{ needs.init.outputs.version }}")
|
||||
done
|
||||
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
|
||||
"${registry}/aarch64-homeassistant:${tag_r}" \
|
||||
--os linux --arch arm64 --variant=v8
|
||||
|
||||
# Build list of all tags for single manifest creation
|
||||
# Note: Using sep-tags=',' in metadata-action for easier parsing
|
||||
TAG_ARGS=()
|
||||
IFS=',' read -ra TAGS <<< "${{ steps.meta.outputs.tags }}"
|
||||
for tag in "${TAGS[@]}"; do
|
||||
TAG_ARGS+=("--tag" "${tag}")
|
||||
done
|
||||
docker manifest push --purge "${registry}/home-assistant:${tag_l}"
|
||||
cosign sign --yes "${registry}/home-assistant:${tag_l}"
|
||||
}
|
||||
|
||||
# Create manifest with ALL tags in a single operation (much faster!)
|
||||
echo "Creating multi-arch manifest with tags: ${TAGS[*]}"
|
||||
docker buildx imagetools create "${TAG_ARGS[@]}" "${ARCH_IMAGES[@]}"
|
||||
function validate_image() {
|
||||
local image=${1}
|
||||
if ! cosign verify --certificate-oidc-issuer https://token.actions.githubusercontent.com --certificate-identity-regexp https://github.com/home-assistant/core/.* "${image}"; then
|
||||
echo "Invalid signature!"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Sign each tag separately (signing requires individual tag names)
|
||||
echo "Signing all tags..."
|
||||
for tag in "${TAGS[@]}"; do
|
||||
echo "Signing ${tag}"
|
||||
cosign sign --yes "${tag}"
|
||||
done
|
||||
function push_dockerhub() {
|
||||
local image=${1}
|
||||
local tag=${2}
|
||||
|
||||
echo "All manifests created and signed successfully"
|
||||
docker tag "ghcr.io/home-assistant/${image}:${tag}" "docker.io/homeassistant/${image}:${tag}"
|
||||
docker push "docker.io/homeassistant/${image}:${tag}"
|
||||
cosign sign --yes "docker.io/homeassistant/${image}:${tag}"
|
||||
}
|
||||
|
||||
# Pull images from github container registry and verify signature
|
||||
docker pull "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
|
||||
docker pull "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
|
||||
|
||||
validate_image "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
|
||||
validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
|
||||
|
||||
if [[ "${{ matrix.registry }}" == "docker.io/homeassistant" ]]; then
|
||||
# Upload images to dockerhub
|
||||
push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
|
||||
push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
|
||||
fi
|
||||
|
||||
# Create version tag
|
||||
create_manifest "${{ needs.init.outputs.version }}" "${{ needs.init.outputs.version }}"
|
||||
|
||||
# Create general tags
|
||||
if [[ "${{ needs.init.outputs.version }}" =~ d ]]; then
|
||||
create_manifest "dev" "${{ needs.init.outputs.version }}"
|
||||
elif [[ "${{ needs.init.outputs.version }}" =~ b ]]; then
|
||||
create_manifest "beta" "${{ needs.init.outputs.version }}"
|
||||
create_manifest "rc" "${{ needs.init.outputs.version }}"
|
||||
else
|
||||
create_manifest "stable" "${{ needs.init.outputs.version }}"
|
||||
create_manifest "latest" "${{ needs.init.outputs.version }}"
|
||||
create_manifest "beta" "${{ needs.init.outputs.version }}"
|
||||
create_manifest "rc" "${{ needs.init.outputs.version }}"
|
||||
|
||||
# Create series version tag (e.g. 2021.6)
|
||||
v="${{ needs.init.outputs.version }}"
|
||||
create_manifest "${v%.*}" "${{ needs.init.outputs.version }}"
|
||||
fi
|
||||
|
||||
build_python:
|
||||
name: Build PyPi package
|
||||
@@ -474,15 +414,15 @@ jobs:
|
||||
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
- name: Download translations
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
|
||||
with:
|
||||
name: translations
|
||||
|
||||
@@ -519,7 +459,7 @@ jobs:
|
||||
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
|
||||
24
.github/workflows/ci.yaml
vendored
24
.github/workflows/ci.yaml
vendored
@@ -40,9 +40,9 @@ env:
|
||||
CACHE_VERSION: 2
|
||||
UV_CACHE_VERSION: 1
|
||||
MYPY_CACHE_VERSION: 1
|
||||
HA_SHORT_VERSION: "2026.1"
|
||||
DEFAULT_PYTHON: "3.13.11"
|
||||
ALL_PYTHON_VERSIONS: "['3.13.11', '3.14.2']"
|
||||
HA_SHORT_VERSION: "2025.12"
|
||||
DEFAULT_PYTHON: "3.13"
|
||||
ALL_PYTHON_VERSIONS: "['3.13', '3.14']"
|
||||
# 10.3 is the oldest supported version
|
||||
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
|
||||
# 10.6 is the current long-term-support
|
||||
@@ -99,7 +99,7 @@ jobs:
|
||||
steps:
|
||||
- &checkout
|
||||
name: Check out code from GitHub
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- name: Generate partial Python venv restore key
|
||||
id: generate_python_cache_key
|
||||
run: |
|
||||
@@ -257,13 +257,13 @@ jobs:
|
||||
- &setup-python-default
|
||||
name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: &actions-setup-python actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: &actions-cache actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
uses: &actions-cache actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
key: &key-pre-commit-venv >-
|
||||
@@ -304,7 +304,7 @@ jobs:
|
||||
- &cache-restore-pre-commit-venv
|
||||
name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: &actions-cache-restore actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
uses: &actions-cache-restore actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -511,7 +511,7 @@ jobs:
|
||||
fi
|
||||
- name: Save apt cache
|
||||
if: steps.cache-apt-check.outputs.cache-hit != 'true'
|
||||
uses: &actions-cache-save actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
uses: &actions-cache-save actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: *path-apt-cache
|
||||
key: *key-apt-cache
|
||||
@@ -534,7 +534,7 @@ jobs:
|
||||
python --version
|
||||
uv pip freeze >> pip_freeze.txt
|
||||
- name: Upload pip_freeze artifact
|
||||
uses: &actions-upload-artifact actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: &actions-upload-artifact actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
||||
with:
|
||||
name: pip-freeze-${{ matrix.python-version }}
|
||||
path: pip_freeze.txt
|
||||
@@ -864,7 +864,7 @@ jobs:
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
|
||||
- name: Download pytest_buckets
|
||||
uses: &actions-download-artifact actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
uses: &actions-download-artifact actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
|
||||
with:
|
||||
name: pytest_buckets
|
||||
- &compile-english-translations
|
||||
@@ -1188,7 +1188,7 @@ jobs:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
if: needs.info.outputs.test_full_suite == 'true'
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
flags: full-suite
|
||||
@@ -1313,7 +1313,7 @@ jobs:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
if: needs.info.outputs.test_full_suite == 'false'
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
6
.github/workflows/codeql.yml
vendored
6
.github/workflows/codeql.yml
vendored
@@ -21,14 +21,14 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
|
||||
uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
|
||||
uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
||||
@@ -231,7 +231,7 @@ jobs:
|
||||
- name: Detect duplicates using AI
|
||||
id: ai_detection
|
||||
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
|
||||
uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
|
||||
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
|
||||
with:
|
||||
model: openai/gpt-4o
|
||||
system-prompt: |
|
||||
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
- name: Detect language using AI
|
||||
id: ai_language_detection
|
||||
if: steps.detect_language.outputs.should_continue == 'true'
|
||||
uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
|
||||
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
|
||||
with:
|
||||
model: openai/gpt-4o-mini
|
||||
system-prompt: |
|
||||
|
||||
2
.github/workflows/lock.yml
vendored
2
.github/workflows/lock.yml
vendored
@@ -10,7 +10,7 @@ jobs:
|
||||
if: github.repository_owner == 'home-assistant'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: dessant/lock-threads@7266a7ce5c1df01b1c6db85bf8cd86c737dadbe7 # v6.0.0
|
||||
- uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 # v5.0.1
|
||||
with:
|
||||
github-token: ${{ github.token }}
|
||||
issue-inactive-days: "30"
|
||||
|
||||
6
.github/workflows/stale.yml
vendored
6
.github/workflows/stale.yml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
# - No PRs marked as no-stale
|
||||
# - No issues (-1)
|
||||
- name: 60 days stale PRs policy
|
||||
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
|
||||
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
days-before-stale: 60
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
# - No issues marked as no-stale or help-wanted
|
||||
# - No PRs (-1)
|
||||
- name: 90 days stale issues
|
||||
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
|
||||
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
|
||||
with:
|
||||
repo-token: ${{ steps.token.outputs.token }}
|
||||
days-before-stale: 90
|
||||
@@ -87,7 +87,7 @@ jobs:
|
||||
# - No Issues marked as no-stale or help-wanted
|
||||
# - No PRs (-1)
|
||||
- name: Needs more information stale issues policy
|
||||
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
|
||||
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
|
||||
with:
|
||||
repo-token: ${{ steps.token.outputs.token }}
|
||||
only-labels: "needs-more-information"
|
||||
|
||||
4
.github/workflows/translations.yml
vendored
4
.github/workflows/translations.yml
vendored
@@ -19,10 +19,10 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
|
||||
23
.github/workflows/wheels.yml
vendored
23
.github/workflows/wheels.yml
vendored
@@ -28,14 +28,16 @@ jobs:
|
||||
name: Initialize wheels builder
|
||||
if: github.repository_owner == 'home-assistant'
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
architectures: ${{ steps.info.outputs.architectures }}
|
||||
steps:
|
||||
- &checkout
|
||||
name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -48,6 +50,10 @@ jobs:
|
||||
pip install "$(grep '^uv' < requirements.txt)"
|
||||
uv pip install -r requirements.txt
|
||||
|
||||
- name: Get information
|
||||
id: info
|
||||
uses: home-assistant/actions/helpers/info@master
|
||||
|
||||
- name: Create requirements_diff file
|
||||
run: |
|
||||
if [[ ${{ github.event_name }} =~ (schedule|workflow_dispatch) ]]; then
|
||||
@@ -74,7 +80,7 @@ jobs:
|
||||
) > .env_file
|
||||
|
||||
- name: Upload env_file
|
||||
uses: &actions-upload-artifact actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: &actions-upload-artifact actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
||||
with:
|
||||
name: env_file
|
||||
path: ./.env_file
|
||||
@@ -108,10 +114,9 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix: &matrix-build
|
||||
abi: ["cp313", "cp314"]
|
||||
arch: ["amd64", "aarch64"]
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
include:
|
||||
- arch: amd64
|
||||
os: ubuntu-latest
|
||||
- os: ubuntu-latest
|
||||
- arch: aarch64
|
||||
os: ubuntu-24.04-arm
|
||||
steps:
|
||||
@@ -119,7 +124,7 @@ jobs:
|
||||
|
||||
- &download-env-file
|
||||
name: Download env_file
|
||||
uses: &actions-download-artifact actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
uses: &actions-download-artifact actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
|
||||
with:
|
||||
name: env_file
|
||||
|
||||
@@ -135,8 +140,9 @@ jobs:
|
||||
sed -i "/uv/d" requirements.txt
|
||||
sed -i "/uv/d" requirements_diff.txt
|
||||
|
||||
# home-assistant/wheels doesn't support sha pinning
|
||||
- name: Build wheels
|
||||
uses: &home-assistant-wheels home-assistant/wheels@e5742a69d69f0e274e2689c998900c7d19652c21 # 2025.12.0
|
||||
uses: &home-assistant-wheels home-assistant/wheels@2025.10.0
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
@@ -177,6 +183,7 @@ jobs:
|
||||
sed -i "/uv/d" requirements.txt
|
||||
sed -i "/uv/d" requirements_diff.txt
|
||||
|
||||
# home-assistant/wheels doesn't support sha pinning
|
||||
- name: Build wheels
|
||||
uses: *home-assistant-wheels
|
||||
with:
|
||||
|
||||
@@ -94,7 +94,7 @@ repos:
|
||||
pass_filenames: false
|
||||
language: script
|
||||
types: [text]
|
||||
files: ^(script/hassfest/(metadata|docker)\.py|homeassistant/const\.py$|pyproject\.toml)$
|
||||
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
|
||||
- id: hassfest-mypy-config
|
||||
name: hassfest-mypy-config
|
||||
entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config
|
||||
|
||||
@@ -120,6 +120,7 @@ homeassistant.components.blueprint.*
|
||||
homeassistant.components.bluesound.*
|
||||
homeassistant.components.bluetooth.*
|
||||
homeassistant.components.bluetooth_adapters.*
|
||||
homeassistant.components.bluetooth_tracker.*
|
||||
homeassistant.components.bmw_connected_drive.*
|
||||
homeassistant.components.bond.*
|
||||
homeassistant.components.bosch_alarm.*
|
||||
@@ -187,7 +188,6 @@ homeassistant.components.elkm1.*
|
||||
homeassistant.components.emulated_hue.*
|
||||
homeassistant.components.energenie_power_sockets.*
|
||||
homeassistant.components.energy.*
|
||||
homeassistant.components.energyid.*
|
||||
homeassistant.components.energyzero.*
|
||||
homeassistant.components.enigma2.*
|
||||
homeassistant.components.enphase_envoy.*
|
||||
|
||||
40
CODEOWNERS
generated
40
CODEOWNERS
generated
@@ -73,8 +73,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/airobot/ @mettolen
|
||||
/homeassistant/components/airos/ @CoMPaTech
|
||||
/tests/components/airos/ @CoMPaTech
|
||||
/homeassistant/components/airpatrol/ @antondalgren
|
||||
/tests/components/airpatrol/ @antondalgren
|
||||
/homeassistant/components/airq/ @Sibgatulin @dl2080
|
||||
/tests/components/airq/ @Sibgatulin @dl2080
|
||||
/homeassistant/components/airthings/ @danielhiversen @LaStrada
|
||||
@@ -123,8 +121,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/androidtv/ @JeffLIrion @ollo69
|
||||
/homeassistant/components/androidtv_remote/ @tronikos @Drafteed
|
||||
/tests/components/androidtv_remote/ @tronikos @Drafteed
|
||||
/homeassistant/components/anglian_water/ @pantherale0
|
||||
/tests/components/anglian_water/ @pantherale0
|
||||
/homeassistant/components/anova/ @Lash-L
|
||||
/tests/components/anova/ @Lash-L
|
||||
/homeassistant/components/anthemav/ @hyralex
|
||||
@@ -187,8 +183,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/automation/ @home-assistant/core
|
||||
/tests/components/automation/ @home-assistant/core
|
||||
/homeassistant/components/avea/ @pattyland
|
||||
/homeassistant/components/awair/ @ahayworth @ricohageman
|
||||
/tests/components/awair/ @ahayworth @ricohageman
|
||||
/homeassistant/components/awair/ @ahayworth @danielsjf
|
||||
/tests/components/awair/ @ahayworth @danielsjf
|
||||
/homeassistant/components/aws_s3/ @tomasbedrich
|
||||
/tests/components/aws_s3/ @tomasbedrich
|
||||
/homeassistant/components/axis/ @Kane610
|
||||
@@ -220,8 +216,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/bizkaibus/ @UgaitzEtxebarria
|
||||
/homeassistant/components/blebox/ @bbx-a @swistakm
|
||||
/tests/components/blebox/ @bbx-a @swistakm
|
||||
/homeassistant/components/blink/ @fronzbot
|
||||
/tests/components/blink/ @fronzbot
|
||||
/homeassistant/components/blink/ @fronzbot @mkmer
|
||||
/tests/components/blink/ @fronzbot @mkmer
|
||||
/homeassistant/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
|
||||
/tests/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
|
||||
/homeassistant/components/bluemaestro/ @bdraco
|
||||
@@ -308,8 +304,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/config/ @home-assistant/core
|
||||
/homeassistant/components/configurator/ @home-assistant/core
|
||||
/tests/components/configurator/ @home-assistant/core
|
||||
/homeassistant/components/control4/ @lawtancool @davidrecordon
|
||||
/tests/components/control4/ @lawtancool @davidrecordon
|
||||
/homeassistant/components/control4/ @lawtancool
|
||||
/tests/components/control4/ @lawtancool
|
||||
/homeassistant/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/tests/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
|
||||
/homeassistant/components/cookidoo/ @miaucl
|
||||
@@ -420,8 +416,6 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/efergy/ @tkdrob
|
||||
/tests/components/efergy/ @tkdrob
|
||||
/homeassistant/components/egardia/ @jeroenterheerdt
|
||||
/homeassistant/components/egauge/ @neggert
|
||||
/tests/components/egauge/ @neggert
|
||||
/homeassistant/components/eheimdigital/ @autinerd
|
||||
/tests/components/eheimdigital/ @autinerd
|
||||
/homeassistant/components/ekeybionyx/ @richardpolzer
|
||||
@@ -456,15 +450,13 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/energenie_power_sockets/ @gnumpi
|
||||
/homeassistant/components/energy/ @home-assistant/core
|
||||
/tests/components/energy/ @home-assistant/core
|
||||
/homeassistant/components/energyid/ @JrtPec @Molier
|
||||
/tests/components/energyid/ @JrtPec @Molier
|
||||
/homeassistant/components/energyzero/ @klaasnicolaas
|
||||
/tests/components/energyzero/ @klaasnicolaas
|
||||
/homeassistant/components/enigma2/ @autinerd
|
||||
/tests/components/enigma2/ @autinerd
|
||||
/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
|
||||
/tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
|
||||
/homeassistant/components/entur_public_transport/ @hfurubotten @SanderBlom
|
||||
/homeassistant/components/entur_public_transport/ @hfurubotten
|
||||
/homeassistant/components/environment_canada/ @gwww @michaeldavie
|
||||
/tests/components/environment_canada/ @gwww @michaeldavie
|
||||
/homeassistant/components/ephember/ @ttroy50 @roberty99
|
||||
@@ -480,8 +472,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/escea/ @lazdavila
|
||||
/homeassistant/components/esphome/ @jesserockz @kbx81 @bdraco
|
||||
/tests/components/esphome/ @jesserockz @kbx81 @bdraco
|
||||
/homeassistant/components/essent/ @jaapp
|
||||
/tests/components/essent/ @jaapp
|
||||
/homeassistant/components/eufylife_ble/ @bdr99
|
||||
/tests/components/eufylife_ble/ @bdr99
|
||||
/homeassistant/components/event/ @home-assistant/core
|
||||
@@ -543,8 +533,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/freebox/ @hacf-fr @Quentame
|
||||
/homeassistant/components/freedompro/ @stefano055415
|
||||
/tests/components/freedompro/ @stefano055415
|
||||
/homeassistant/components/fressnapf_tracker/ @eifinger
|
||||
/tests/components/fressnapf_tracker/ @eifinger
|
||||
/homeassistant/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
|
||||
/tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
|
||||
/homeassistant/components/fritzbox/ @mib1185 @flabbamann
|
||||
@@ -575,8 +563,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/generic_hygrostat/ @Shulyaka
|
||||
/homeassistant/components/geniushub/ @manzanotti
|
||||
/tests/components/geniushub/ @manzanotti
|
||||
/homeassistant/components/gentex_homelink/ @niaexa @ryanjones-gentex
|
||||
/tests/components/gentex_homelink/ @niaexa @ryanjones-gentex
|
||||
/homeassistant/components/geo_json_events/ @exxamalte
|
||||
/tests/components/geo_json_events/ @exxamalte
|
||||
/homeassistant/components/geo_location/ @home-assistant/core
|
||||
@@ -605,8 +591,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/goodwe/ @mletenay @starkillerOG
|
||||
/homeassistant/components/google/ @allenporter
|
||||
/tests/components/google/ @allenporter
|
||||
/homeassistant/components/google_air_quality/ @Thomas55555
|
||||
/tests/components/google_air_quality/ @Thomas55555
|
||||
/homeassistant/components/google_assistant/ @home-assistant/cloud
|
||||
/tests/components/google_assistant/ @home-assistant/cloud
|
||||
/homeassistant/components/google_assistant_sdk/ @tronikos
|
||||
@@ -665,7 +649,6 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/here_travel_time/ @eifinger
|
||||
/tests/components/here_travel_time/ @eifinger
|
||||
/homeassistant/components/hikvision/ @mezz64
|
||||
/tests/components/hikvision/ @mezz64
|
||||
/homeassistant/components/hikvisioncam/ @fbradyirl
|
||||
/homeassistant/components/hisense_aehw4a1/ @bannhead
|
||||
/tests/components/hisense_aehw4a1/ @bannhead
|
||||
@@ -717,8 +700,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/huawei_lte/ @scop @fphammerle
|
||||
/homeassistant/components/hue/ @marcelveldt
|
||||
/tests/components/hue/ @marcelveldt
|
||||
/homeassistant/components/hue_ble/ @flip-dots
|
||||
/tests/components/hue_ble/ @flip-dots
|
||||
/homeassistant/components/huisbaasje/ @dennisschroer
|
||||
/tests/components/huisbaasje/ @dennisschroer
|
||||
/homeassistant/components/humidifier/ @home-assistant/core @Shulyaka
|
||||
@@ -1363,8 +1344,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/ring/ @sdb9696
|
||||
/homeassistant/components/risco/ @OnFreund
|
||||
/tests/components/risco/ @OnFreund
|
||||
/homeassistant/components/rituals_perfume_genie/ @milanmeu @frenck @quebulm
|
||||
/tests/components/rituals_perfume_genie/ @milanmeu @frenck @quebulm
|
||||
/homeassistant/components/rituals_perfume_genie/ @milanmeu @frenck
|
||||
/tests/components/rituals_perfume_genie/ @milanmeu @frenck
|
||||
/homeassistant/components/rmvtransport/ @cgtobi
|
||||
/tests/components/rmvtransport/ @cgtobi
|
||||
/homeassistant/components/roborock/ @Lash-L @allenporter
|
||||
@@ -1770,7 +1751,6 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/vilfo/ @ManneW
|
||||
/tests/components/vilfo/ @ManneW
|
||||
/homeassistant/components/vivotek/ @HarlemSquirrel
|
||||
/tests/components/vivotek/ @HarlemSquirrel
|
||||
/homeassistant/components/vizio/ @raman325
|
||||
/tests/components/vizio/ @raman325
|
||||
/homeassistant/components/vlc_telnet/ @rodripf @MartinHjelmare
|
||||
@@ -1810,8 +1790,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/weatherflow_cloud/ @jeeftor
|
||||
/homeassistant/components/weatherkit/ @tjhorner
|
||||
/tests/components/weatherkit/ @tjhorner
|
||||
/homeassistant/components/web_rtc/ @home-assistant/core
|
||||
/tests/components/web_rtc/ @home-assistant/core
|
||||
/homeassistant/components/webdav/ @jpbede
|
||||
/tests/components/webdav/ @jpbede
|
||||
/homeassistant/components/webhook/ @home-assistant/core
|
||||
|
||||
31
Dockerfile
generated
31
Dockerfile
generated
@@ -4,33 +4,32 @@
|
||||
ARG BUILD_FROM
|
||||
FROM ${BUILD_FROM}
|
||||
|
||||
LABEL \
|
||||
io.hass.type="core" \
|
||||
org.opencontainers.image.authors="The Home Assistant Authors" \
|
||||
org.opencontainers.image.description="Open-source home automation platform running on Python 3" \
|
||||
org.opencontainers.image.documentation="https://www.home-assistant.io/docs/" \
|
||||
org.opencontainers.image.licenses="Apache-2.0" \
|
||||
org.opencontainers.image.source="https://github.com/home-assistant/core" \
|
||||
org.opencontainers.image.title="Home Assistant" \
|
||||
org.opencontainers.image.url="https://www.home-assistant.io/"
|
||||
|
||||
# Synchronize with homeassistant/core.py:async_stop
|
||||
ENV \
|
||||
S6_SERVICES_GRACETIME=240000 \
|
||||
UV_SYSTEM_PYTHON=true \
|
||||
UV_NO_CACHE=true
|
||||
|
||||
ARG QEMU_CPU
|
||||
|
||||
# Home Assistant S6-Overlay
|
||||
COPY rootfs /
|
||||
|
||||
# Add go2rtc binary
|
||||
COPY --from=ghcr.io/alexxit/go2rtc@sha256:baef0aa19d759fcfd31607b34ce8eaf039d496282bba57731e6ae326896d7640 /usr/local/bin/go2rtc /bin/go2rtc
|
||||
|
||||
# Needs to be redefined inside the FROM statement to be set for RUN commands
|
||||
ARG BUILD_ARCH
|
||||
# Get go2rtc binary
|
||||
RUN \
|
||||
case "${BUILD_ARCH}" in \
|
||||
"aarch64") go2rtc_suffix='arm64' ;; \
|
||||
*) go2rtc_suffix=${BUILD_ARCH} ;; \
|
||||
esac \
|
||||
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.12/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
|
||||
&& chmod +x /bin/go2rtc \
|
||||
# Verify go2rtc can be executed
|
||||
go2rtc --version \
|
||||
# Install uv
|
||||
&& pip3 install uv==0.9.17
|
||||
&& go2rtc --version
|
||||
|
||||
# Install uv
|
||||
RUN pip3 install uv==0.9.6
|
||||
|
||||
WORKDIR /usr/src
|
||||
|
||||
|
||||
@@ -35,22 +35,25 @@ COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv
|
||||
|
||||
USER vscode
|
||||
|
||||
COPY .python-version ./
|
||||
RUN uv python install
|
||||
|
||||
ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
|
||||
RUN --mount=type=bind,source=.python-version,target=.python-version \
|
||||
uv python install \
|
||||
&& uv venv $VIRTUAL_ENV
|
||||
RUN uv venv $VIRTUAL_ENV
|
||||
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
|
||||
|
||||
WORKDIR /tmp
|
||||
|
||||
# Setup hass-release
|
||||
RUN git clone --depth 1 https://github.com/home-assistant/hass-release ~/hass-release \
|
||||
&& uv pip install -e ~/hass-release/
|
||||
|
||||
# Install Python dependencies from requirements
|
||||
RUN --mount=type=bind,source=requirements.txt,target=requirements.txt \
|
||||
--mount=type=bind,source=homeassistant/package_constraints.txt,target=homeassistant/package_constraints.txt \
|
||||
--mount=type=bind,source=requirements_test.txt,target=requirements_test.txt \
|
||||
--mount=type=bind,source=requirements_test_pre_commit.txt,target=requirements_test_pre_commit.txt \
|
||||
uv pip install -r requirements.txt -r requirements_test.txt
|
||||
COPY requirements.txt ./
|
||||
COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
|
||||
RUN uv pip install -r requirements.txt
|
||||
COPY requirements_test.txt requirements_test_pre_commit.txt ./
|
||||
RUN uv pip install -r requirements_test.txt
|
||||
|
||||
WORKDIR /workspaces
|
||||
|
||||
|
||||
16
build.yaml
Normal file
16
build.yaml
Normal file
@@ -0,0 +1,16 @@
|
||||
image: ghcr.io/home-assistant/{arch}-homeassistant
|
||||
build_from:
|
||||
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.11.0
|
||||
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.11.0
|
||||
cosign:
|
||||
base_identity: https://github.com/home-assistant/docker/.*
|
||||
identity: https://github.com/home-assistant/core/.*
|
||||
labels:
|
||||
io.hass.type: core
|
||||
org.opencontainers.image.title: Home Assistant
|
||||
org.opencontainers.image.description: Open-source home automation platform running on Python 3
|
||||
org.opencontainers.image.source: https://github.com/home-assistant/core
|
||||
org.opencontainers.image.authors: The Home Assistant Authors
|
||||
org.opencontainers.image.url: https://www.home-assistant.io/
|
||||
org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
|
||||
org.opencontainers.image.licenses: Apache-2.0
|
||||
@@ -7,7 +7,6 @@ from typing import Any, Final
|
||||
from homeassistant.const import (
|
||||
EVENT_COMPONENT_LOADED,
|
||||
EVENT_CORE_CONFIG_UPDATE,
|
||||
EVENT_LABS_UPDATED,
|
||||
EVENT_LOVELACE_UPDATED,
|
||||
EVENT_PANELS_UPDATED,
|
||||
EVENT_RECORDER_5MIN_STATISTICS_GENERATED,
|
||||
@@ -46,7 +45,6 @@ SUBSCRIBE_ALLOWLIST: Final[set[EventType[Any] | str]] = {
|
||||
EVENT_STATE_CHANGED,
|
||||
EVENT_THEMES_UPDATED,
|
||||
EVENT_LABEL_REGISTRY_UPDATED,
|
||||
EVENT_LABS_UPDATED,
|
||||
EVENT_CATEGORY_REGISTRY_UPDATED,
|
||||
EVENT_FLOOR_REGISTRY_UPDATED,
|
||||
}
|
||||
|
||||
@@ -624,16 +624,13 @@ async def async_enable_logging(
|
||||
|
||||
if log_file is None:
|
||||
default_log_path = hass.config.path(ERROR_LOG_FILENAME)
|
||||
if "SUPERVISOR" in os.environ and "HA_DUPLICATE_LOG_FILE" not in os.environ:
|
||||
if "SUPERVISOR" in os.environ:
|
||||
_LOGGER.info("Running in Supervisor, not logging to file")
|
||||
# Rename the default log file if it exists, since previous versions created
|
||||
# it even on Supervisor
|
||||
def rename_old_file() -> None:
|
||||
"""Rename old log file in executor."""
|
||||
if os.path.isfile(default_log_path):
|
||||
with contextlib.suppress(OSError):
|
||||
os.rename(default_log_path, f"{default_log_path}.old")
|
||||
|
||||
await hass.async_add_executor_job(rename_old_file)
|
||||
if os.path.isfile(default_log_path):
|
||||
with contextlib.suppress(OSError):
|
||||
os.rename(default_log_path, f"{default_log_path}.old")
|
||||
err_log_path = None
|
||||
else:
|
||||
err_log_path = default_log_path
|
||||
@@ -1003,7 +1000,7 @@ class _WatchPendingSetups:
|
||||
# We log every LOG_SLOW_STARTUP_INTERVAL until all integrations are done
|
||||
# once we take over LOG_SLOW_STARTUP_INTERVAL (60s) to start up
|
||||
_LOGGER.warning(
|
||||
"Waiting for integrations to complete setup: %s",
|
||||
"Waiting on integrations to complete setup: %s",
|
||||
self._setup_started,
|
||||
)
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
"domain": "google",
|
||||
"name": "Google",
|
||||
"integrations": [
|
||||
"google_air_quality",
|
||||
"google_assistant",
|
||||
"google_assistant_sdk",
|
||||
"google_cloud",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"domain": "philips",
|
||||
"name": "Philips",
|
||||
"integrations": ["dynalite", "hue", "hue_ble", "philips_js"]
|
||||
"integrations": ["dynalite", "hue", "philips_js"]
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"domain": "raspberry_pi",
|
||||
"name": "Raspberry Pi",
|
||||
"integrations": ["raspberry_pi", "rpi_power", "remote_rpi_gpio"]
|
||||
"integrations": ["raspberry_pi", "rpi_camera", "rpi_power", "remote_rpi_gpio"]
|
||||
}
|
||||
|
||||
@@ -9,9 +9,8 @@ from actron_neo_api import (
|
||||
|
||||
from homeassistant.const import CONF_API_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
|
||||
from .const import _LOGGER, DOMAIN
|
||||
from .const import _LOGGER
|
||||
from .coordinator import (
|
||||
ActronAirConfigEntry,
|
||||
ActronAirRuntimeData,
|
||||
@@ -30,13 +29,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) ->
|
||||
try:
|
||||
systems = await api.get_ac_systems()
|
||||
await api.update_status()
|
||||
except ActronAirAuthError as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_error",
|
||||
) from err
|
||||
except ActronAirAuthError:
|
||||
_LOGGER.error("Authentication error while setting up Actron Air integration")
|
||||
raise
|
||||
except ActronAirAPIError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
_LOGGER.error("API error while setting up Actron Air integration: %s", err)
|
||||
raise
|
||||
|
||||
system_coordinators: dict[str, ActronAirSystemCoordinator] = {}
|
||||
for system in systems:
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
"""Setup config flow for Actron Air integration."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
from actron_neo_api import ActronAirAPI, ActronAirAuthError
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_TOKEN
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
@@ -96,16 +95,8 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
unique_id = str(user_data["id"])
|
||||
await self.async_set_unique_id(unique_id)
|
||||
|
||||
# Check if this is a reauth flow
|
||||
if self.source == SOURCE_REAUTH:
|
||||
self._abort_if_unique_id_mismatch(reason="wrong_account")
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(),
|
||||
data_updates={CONF_API_TOKEN: self._api.refresh_token_value},
|
||||
)
|
||||
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=user_data["email"],
|
||||
data={CONF_API_TOKEN: self._api.refresh_token_value},
|
||||
@@ -123,21 +114,6 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
del self.login_task
|
||||
return await self.async_step_user()
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauthentication request."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm reauth dialog."""
|
||||
if user_input is not None:
|
||||
return await self.async_step_user()
|
||||
|
||||
return self.async_show_form(step_id="reauth_confirm")
|
||||
|
||||
async def async_step_connection_error(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
||||
@@ -5,23 +5,16 @@ from __future__ import annotations
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
|
||||
from actron_neo_api import (
|
||||
ActronAirACSystem,
|
||||
ActronAirAPI,
|
||||
ActronAirAuthError,
|
||||
ActronAirStatus,
|
||||
)
|
||||
from actron_neo_api import ActronAirACSystem, ActronAirAPI, ActronAirStatus
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import _LOGGER, DOMAIN
|
||||
from .const import _LOGGER
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
STALE_DEVICE_TIMEOUT = timedelta(minutes=5)
|
||||
STALE_DEVICE_TIMEOUT = timedelta(hours=24)
|
||||
ERROR_NO_SYSTEMS_FOUND = "no_systems_found"
|
||||
ERROR_UNKNOWN = "unknown_error"
|
||||
|
||||
@@ -36,6 +29,9 @@ class ActronAirRuntimeData:
|
||||
|
||||
type ActronAirConfigEntry = ConfigEntry[ActronAirRuntimeData]
|
||||
|
||||
AUTH_ERROR_THRESHOLD = 3
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
|
||||
"""System coordinator for Actron Air integration."""
|
||||
@@ -63,14 +59,7 @@ class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
|
||||
|
||||
async def _async_update_data(self) -> ActronAirStatus:
|
||||
"""Fetch updates and merge incremental changes into the full state."""
|
||||
try:
|
||||
await self.api.update_status()
|
||||
except ActronAirAuthError as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="auth_error",
|
||||
) from err
|
||||
|
||||
await self.api.update_status()
|
||||
self.status = self.api.state_manager.get_status(self.serial_number)
|
||||
self.last_seen = dt_util.utcnow()
|
||||
return self.status
|
||||
|
||||
@@ -10,8 +10,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/actron_air",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["actron-neo-api==0.2.0"]
|
||||
"requirements": ["actron-neo-api==0.1.87"]
|
||||
}
|
||||
|
||||
@@ -36,7 +36,7 @@ rules:
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: todo
|
||||
|
||||
# Gold
|
||||
|
||||
@@ -2,12 +2,10 @@
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
|
||||
"oauth2_error": "Failed to start authentication flow",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"wrong_account": "You must reauthenticate with the same Actron Air account that was originally configured."
|
||||
"oauth2_error": "Failed to start OAuth2 flow"
|
||||
},
|
||||
"error": {
|
||||
"oauth2_error": "Failed to start authentication flow. Please try again later."
|
||||
"oauth2_error": "Failed to start OAuth2 flow. Please try again later."
|
||||
},
|
||||
"progress": {
|
||||
"wait_for_authorization": "To authenticate, open the following URL and login at Actron Air:\n{verification_uri}\nIf the code is not automatically copied, paste the following code to authorize the integration:\n\n```{user_code}```\n\n\nThe login attempt will time out after {expires_minutes} minutes."
|
||||
@@ -18,23 +16,14 @@
|
||||
"description": "Failed to connect to Actron Air. Please check your internet connection and try again.",
|
||||
"title": "Connection error"
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"description": "Your Actron Air authentication has expired. Select continue to reauthenticate with your Actron Air account. You will be prompted to log in again to restore the connection.",
|
||||
"title": "Authentication expired"
|
||||
},
|
||||
"timeout": {
|
||||
"data": {},
|
||||
"description": "The authentication process timed out. Please try again.",
|
||||
"title": "Authentication timeout"
|
||||
"description": "The authorization process timed out. Please try again.",
|
||||
"title": "Authorization timeout"
|
||||
},
|
||||
"user": {
|
||||
"title": "Actron Air Authentication"
|
||||
"title": "Actron Air OAuth2 Authorization"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"auth_error": {
|
||||
"message": "Authentication failed, please reauthenticate"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@Bre77"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/advantage_air",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["advantage_air"],
|
||||
"requirements": ["advantage-air==0.4.4"]
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@Noltari"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/aemet",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aemet_opendata"],
|
||||
"requirements": ["AEMET-OpenData==0.6.4"]
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": [],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/aftership",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["pyaftership==21.11.0"]
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@ispysoftware"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/agent_dvr",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["agent"],
|
||||
"requirements": ["agent-py==0.0.24"]
|
||||
|
||||
@@ -101,8 +101,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
vol.Schema({str: STRUCTURE_FIELD_SCHEMA}),
|
||||
_validate_structure_fields,
|
||||
),
|
||||
vol.Optional(ATTR_ATTACHMENTS): selector.MediaSelector(
|
||||
{"accept": ["*/*"], "multiple": True}
|
||||
vol.Optional(ATTR_ATTACHMENTS): vol.All(
|
||||
cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
|
||||
),
|
||||
}
|
||||
),
|
||||
@@ -118,8 +118,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
vol.Required(ATTR_TASK_NAME): cv.string,
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.entity_id,
|
||||
vol.Required(ATTR_INSTRUCTIONS): cv.string,
|
||||
vol.Optional(ATTR_ATTACHMENTS): selector.MediaSelector(
|
||||
{"accept": ["*/*"], "multiple": True}
|
||||
vol.Optional(ATTR_ATTACHMENTS): vol.All(
|
||||
cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
|
||||
),
|
||||
}
|
||||
),
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@asymworks"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/airnow",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pyairnow"],
|
||||
"requirements": ["pyairnow==1.3.1"]
|
||||
|
||||
@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.SENSOR]
|
||||
PLATFORMS: list[Platform] = [Platform.CLIMATE]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
|
||||
|
||||
@@ -75,19 +75,9 @@ class AirobotClimate(AirobotEntity, ClimateEntity):
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature.
|
||||
|
||||
If floor temperature is available, thermostat is set up for floor heating.
|
||||
"""
|
||||
if self._status.temp_floor is not None:
|
||||
return self._status.temp_floor
|
||||
"""Return the current temperature."""
|
||||
return self._status.temp_air
|
||||
|
||||
@property
|
||||
def current_humidity(self) -> float | None:
|
||||
"""Return the current humidity."""
|
||||
return self._status.hum_air
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the target temperature."""
|
||||
@@ -136,13 +126,6 @@ class AirobotClimate(AirobotEntity, ClimateEntity):
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set HVAC mode.
|
||||
|
||||
This thermostat only supports HEAT mode. The climate platform validates
|
||||
that only supported modes are passed, so this method is a no-op.
|
||||
"""
|
||||
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
"""Set new preset mode."""
|
||||
try:
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
@@ -175,56 +174,6 @@ class AirobotConfigFlow(BaseConfigFlow, domain=DOMAIN):
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauthentication upon an API authentication error."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm reauthentication dialog."""
|
||||
errors: dict[str, str] = {}
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
|
||||
if user_input is not None:
|
||||
# Combine existing data with new password
|
||||
data = {
|
||||
CONF_HOST: reauth_entry.data[CONF_HOST],
|
||||
CONF_USERNAME: reauth_entry.data[CONF_USERNAME],
|
||||
CONF_PASSWORD: user_input[CONF_PASSWORD],
|
||||
}
|
||||
|
||||
try:
|
||||
await validate_input(self.hass, data)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry,
|
||||
data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
),
|
||||
description_placeholders={
|
||||
"username": reauth_entry.data[CONF_USERNAME],
|
||||
"host": reauth_entry.data[CONF_HOST],
|
||||
},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
||||
@@ -11,7 +11,6 @@ from pyairobotrest.exceptions import AirobotAuthError, AirobotConnectionError
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
@@ -54,15 +53,7 @@ class AirobotDataUpdateCoordinator(DataUpdateCoordinator[AirobotData]):
|
||||
try:
|
||||
status = await self.client.get_statuses()
|
||||
settings = await self.client.get_settings()
|
||||
except AirobotAuthError as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="authentication_failed",
|
||||
) from err
|
||||
except AirobotConnectionError as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_failed",
|
||||
) from err
|
||||
except (AirobotAuthError, AirobotConnectionError) as err:
|
||||
raise UpdateFailed(f"Failed to communicate with device: {err}") from err
|
||||
|
||||
return AirobotData(status=status, settings=settings)
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
"""Diagnostics support for Airobot."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import asdict
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import AirobotConfigEntry
|
||||
|
||||
TO_REDACT_CONFIG = [CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_USERNAME]
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: AirobotConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
# Build device capabilities info
|
||||
device_capabilities = None
|
||||
if coordinator.data:
|
||||
device_capabilities = {
|
||||
"has_floor_sensor": coordinator.data.status.has_floor_sensor,
|
||||
"has_co2_sensor": coordinator.data.status.has_co2_sensor,
|
||||
"hw_version": coordinator.data.status.hw_version,
|
||||
"fw_version": coordinator.data.status.fw_version,
|
||||
}
|
||||
|
||||
return {
|
||||
"entry_data": async_redact_data(entry.data, TO_REDACT_CONFIG),
|
||||
"device_capabilities": device_capabilities,
|
||||
"status": asdict(coordinator.data.status) if coordinator.data else None,
|
||||
"settings": asdict(coordinator.data.settings) if coordinator.data else None,
|
||||
}
|
||||
@@ -12,6 +12,6 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyairobotrest"],
|
||||
"quality_scale": "silver",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyairobotrest==0.1.0"]
|
||||
}
|
||||
|
||||
@@ -34,17 +34,17 @@ rules:
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-known-limitations: done
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
@@ -54,14 +54,12 @@ rules:
|
||||
comment: Single device integration, no dynamic device discovery needed.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: todo
|
||||
exception-translations: done
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: This integration doesn't have any cases where raising an issue is needed.
|
||||
repair-issues: todo
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: Single device integration, no stale device handling needed.
|
||||
|
||||
@@ -1,150 +0,0 @@
|
||||
"""Sensor platform for Airobot thermostat."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from pyairobotrest.models import ThermostatStatus
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
PERCENTAGE,
|
||||
EntityCategory,
|
||||
UnitOfTemperature,
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.util.dt import utcnow
|
||||
from homeassistant.util.variance import ignore_variance
|
||||
|
||||
from . import AirobotConfigEntry
|
||||
from .entity import AirobotEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AirobotSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes Airobot sensor entity."""
|
||||
|
||||
value_fn: Callable[[ThermostatStatus], StateType | datetime]
|
||||
supported_fn: Callable[[ThermostatStatus], bool] = lambda _: True
|
||||
|
||||
|
||||
uptime_to_stable_datetime = ignore_variance(
|
||||
lambda value: utcnow().replace(microsecond=0) - timedelta(seconds=value),
|
||||
timedelta(minutes=2),
|
||||
)
|
||||
|
||||
SENSOR_TYPES: tuple[AirobotSensorEntityDescription, ...] = (
|
||||
AirobotSensorEntityDescription(
|
||||
key="air_temperature",
|
||||
translation_key="air_temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda status: status.temp_air,
|
||||
),
|
||||
AirobotSensorEntityDescription(
|
||||
key="humidity",
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda status: status.hum_air,
|
||||
),
|
||||
AirobotSensorEntityDescription(
|
||||
key="floor_temperature",
|
||||
translation_key="floor_temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda status: status.temp_floor,
|
||||
supported_fn=lambda status: status.has_floor_sensor,
|
||||
),
|
||||
AirobotSensorEntityDescription(
|
||||
key="co2",
|
||||
device_class=SensorDeviceClass.CO2,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda status: status.co2,
|
||||
supported_fn=lambda status: status.has_co2_sensor,
|
||||
),
|
||||
AirobotSensorEntityDescription(
|
||||
key="air_quality_index",
|
||||
device_class=SensorDeviceClass.AQI,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda status: status.aqi,
|
||||
supported_fn=lambda status: status.has_co2_sensor,
|
||||
),
|
||||
AirobotSensorEntityDescription(
|
||||
key="heating_uptime",
|
||||
translation_key="heating_uptime",
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
suggested_unit_of_measurement=UnitOfTime.HOURS,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda status: status.heating_uptime,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
AirobotSensorEntityDescription(
|
||||
key="errors",
|
||||
translation_key="errors",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda status: status.errors,
|
||||
),
|
||||
AirobotSensorEntityDescription(
|
||||
key="device_uptime",
|
||||
translation_key="device_uptime",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda status: uptime_to_stable_datetime(status.device_uptime),
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AirobotConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Airobot sensor platform."""
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities(
|
||||
AirobotSensor(coordinator, description)
|
||||
for description in SENSOR_TYPES
|
||||
if description.supported_fn(coordinator.data.status)
|
||||
)
|
||||
|
||||
|
||||
class AirobotSensor(AirobotEntity, SensorEntity):
|
||||
"""Representation of an Airobot sensor."""
|
||||
|
||||
entity_description: AirobotSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator,
|
||||
description: AirobotSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.data.status.device_id}_{description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType | datetime:
|
||||
"""Return the state of the sensor."""
|
||||
return self.entity_description.value_fn(self.coordinator.data.status)
|
||||
@@ -1,8 +1,7 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -15,24 +14,15 @@
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "[%key:component::airobot::config::step::user::data_description::password%]"
|
||||
"password": "The thermostat password."
|
||||
},
|
||||
"description": "Airobot thermostat {device_id} discovered at {host}. Enter the password to complete setup. Find the password in the thermostat settings menu under Connectivity → Mobile app."
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "[%key:component::airobot::config::step::user::data_description::password%]"
|
||||
},
|
||||
"description": "The authentication for Airobot thermostat at {host} (Device ID: {username}) has expired. Please enter the password to reauthenticate. Find the password in the thermostat settings menu under Connectivity → Mobile app."
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"username": "Device ID"
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your Airobot thermostat.",
|
||||
@@ -43,32 +33,7 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"air_temperature": {
|
||||
"name": "Air temperature"
|
||||
},
|
||||
"device_uptime": {
|
||||
"name": "Device uptime"
|
||||
},
|
||||
"errors": {
|
||||
"name": "Error count"
|
||||
},
|
||||
"floor_temperature": {
|
||||
"name": "Floor temperature"
|
||||
},
|
||||
"heating_uptime": {
|
||||
"name": "Heating uptime"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"authentication_failed": {
|
||||
"message": "Authentication failed, please reauthenticate."
|
||||
},
|
||||
"connection_failed": {
|
||||
"message": "Failed to communicate with device."
|
||||
},
|
||||
"set_preset_mode_failed": {
|
||||
"message": "Failed to set preset mode to {preset_mode}."
|
||||
},
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
"""The AirPatrol integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import PLATFORMS
|
||||
from .coordinator import AirPatrolConfigEntry, AirPatrolDataUpdateCoordinator
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AirPatrolConfigEntry) -> bool:
|
||||
"""Set up AirPatrol from a config entry."""
|
||||
coordinator = AirPatrolDataUpdateCoordinator(hass, entry)
|
||||
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AirPatrolConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
@@ -1,208 +0,0 @@
|
||||
"""Climate platform for AirPatrol integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
FAN_AUTO,
|
||||
FAN_HIGH,
|
||||
FAN_LOW,
|
||||
SWING_OFF,
|
||||
SWING_ON,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AirPatrolConfigEntry
|
||||
from .coordinator import AirPatrolDataUpdateCoordinator
|
||||
from .entity import AirPatrolEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
AP_TO_HA_HVAC_MODES = {
|
||||
"heat": HVACMode.HEAT,
|
||||
"cool": HVACMode.COOL,
|
||||
"off": HVACMode.OFF,
|
||||
}
|
||||
HA_TO_AP_HVAC_MODES = {value: key for key, value in AP_TO_HA_HVAC_MODES.items()}
|
||||
|
||||
AP_TO_HA_FAN_MODES = {
|
||||
"min": FAN_LOW,
|
||||
"max": FAN_HIGH,
|
||||
"auto": FAN_AUTO,
|
||||
}
|
||||
HA_TO_AP_FAN_MODES = {value: key for key, value in AP_TO_HA_FAN_MODES.items()}
|
||||
|
||||
AP_TO_HA_SWING_MODES = {
|
||||
"on": SWING_ON,
|
||||
"off": SWING_OFF,
|
||||
}
|
||||
HA_TO_AP_SWING_MODES = {value: key for key, value in AP_TO_HA_SWING_MODES.items()}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: AirPatrolConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up AirPatrol climate entities."""
|
||||
coordinator = config_entry.runtime_data
|
||||
units = coordinator.data
|
||||
|
||||
async_add_entities(
|
||||
AirPatrolClimate(coordinator, unit_id)
|
||||
for unit_id, unit in units.items()
|
||||
if "climate" in unit
|
||||
)
|
||||
|
||||
|
||||
class AirPatrolClimate(AirPatrolEntity, ClimateEntity):
|
||||
"""AirPatrol climate entity."""
|
||||
|
||||
_attr_name = None
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.FAN_MODE
|
||||
| ClimateEntityFeature.SWING_MODE
|
||||
| ClimateEntityFeature.TURN_OFF
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
)
|
||||
_attr_hvac_modes = [HVACMode.HEAT, HVACMode.COOL, HVACMode.OFF]
|
||||
_attr_fan_modes = [FAN_LOW, FAN_HIGH, FAN_AUTO]
|
||||
_attr_swing_modes = [SWING_ON, SWING_OFF]
|
||||
_attr_min_temp = 16.0
|
||||
_attr_max_temp = 30.0
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AirPatrolDataUpdateCoordinator,
|
||||
unit_id: str,
|
||||
) -> None:
|
||||
"""Initialize the climate entity."""
|
||||
super().__init__(coordinator, unit_id)
|
||||
self._attr_unique_id = f"{coordinator.config_entry.unique_id}-{unit_id}"
|
||||
|
||||
@property
|
||||
def climate_data(self) -> dict[str, Any]:
|
||||
"""Return the climate data."""
|
||||
return self.device_data.get("climate") or {}
|
||||
|
||||
@property
|
||||
def params(self) -> dict[str, Any]:
|
||||
"""Return the current parameters for the climate entity."""
|
||||
return self.climate_data.get("ParametersData") or {}
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return super().available and bool(self.climate_data)
|
||||
|
||||
@property
|
||||
def current_humidity(self) -> float | None:
|
||||
"""Return the current humidity."""
|
||||
if humidity := self.climate_data.get("RoomHumidity"):
|
||||
return float(humidity)
|
||||
return None
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
if temp := self.climate_data.get("RoomTemp"):
|
||||
return float(temp)
|
||||
return None
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the target temperature."""
|
||||
if temp := self.params.get("PumpTemp"):
|
||||
return float(temp)
|
||||
return None
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode | None:
|
||||
"""Return the current HVAC mode."""
|
||||
pump_power = self.params.get("PumpPower")
|
||||
pump_mode = self.params.get("PumpMode")
|
||||
|
||||
if pump_power and pump_power == "on" and pump_mode:
|
||||
return AP_TO_HA_HVAC_MODES.get(pump_mode)
|
||||
return HVACMode.OFF
|
||||
|
||||
@property
|
||||
def fan_mode(self) -> str | None:
|
||||
"""Return the current fan mode."""
|
||||
fan_speed = self.params.get("FanSpeed")
|
||||
if fan_speed:
|
||||
return AP_TO_HA_FAN_MODES.get(fan_speed)
|
||||
return None
|
||||
|
||||
@property
|
||||
def swing_mode(self) -> str | None:
|
||||
"""Return the current swing mode."""
|
||||
swing = self.params.get("Swing")
|
||||
if swing:
|
||||
return AP_TO_HA_SWING_MODES.get(swing)
|
||||
return None
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
params = self.params.copy()
|
||||
|
||||
if ATTR_TEMPERATURE in kwargs:
|
||||
temp = kwargs[ATTR_TEMPERATURE]
|
||||
params["PumpTemp"] = f"{temp:.3f}"
|
||||
|
||||
await self._async_set_params(params)
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set new target hvac mode."""
|
||||
params = self.params.copy()
|
||||
|
||||
if hvac_mode == HVACMode.OFF:
|
||||
params["PumpPower"] = "off"
|
||||
else:
|
||||
params["PumpPower"] = "on"
|
||||
params["PumpMode"] = HA_TO_AP_HVAC_MODES.get(hvac_mode)
|
||||
|
||||
await self._async_set_params(params)
|
||||
|
||||
async def async_set_fan_mode(self, fan_mode: str) -> None:
|
||||
"""Set new target fan mode."""
|
||||
params = self.params.copy()
|
||||
params["FanSpeed"] = HA_TO_AP_FAN_MODES.get(fan_mode)
|
||||
|
||||
await self._async_set_params(params)
|
||||
|
||||
async def async_set_swing_mode(self, swing_mode: str) -> None:
|
||||
"""Set new target swing mode."""
|
||||
params = self.params.copy()
|
||||
params["Swing"] = HA_TO_AP_SWING_MODES.get(swing_mode)
|
||||
|
||||
await self._async_set_params(params)
|
||||
|
||||
async def async_turn_on(self) -> None:
|
||||
"""Turn the entity on."""
|
||||
params = self.params.copy()
|
||||
if mode := AP_TO_HA_HVAC_MODES.get(params["PumpMode"]):
|
||||
await self.async_set_hvac_mode(mode)
|
||||
|
||||
async def async_turn_off(self) -> None:
|
||||
"""Turn the entity off."""
|
||||
await self.async_set_hvac_mode(HVACMode.OFF)
|
||||
|
||||
async def _async_set_params(self, params: dict[str, Any]) -> None:
|
||||
"""Set the unit to dry mode."""
|
||||
new_climate_data = self.climate_data.copy()
|
||||
new_climate_data["ParametersData"] = params
|
||||
|
||||
await self.coordinator.api.set_unit_climate_data(
|
||||
self._unit_id, new_climate_data
|
||||
)
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
@@ -1,111 +0,0 @@
|
||||
"""Config flow for the AirPatrol integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
from airpatrol.api import AirPatrolAPI, AirPatrolAuthenticationError, AirPatrolError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_EMAIL, CONF_PASSWORD
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import (
|
||||
TextSelector,
|
||||
TextSelectorConfig,
|
||||
TextSelectorType,
|
||||
)
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_EMAIL): TextSelector(
|
||||
TextSelectorConfig(
|
||||
type=TextSelectorType.EMAIL,
|
||||
autocomplete="email",
|
||||
)
|
||||
),
|
||||
vol.Required(CONF_PASSWORD): TextSelector(
|
||||
TextSelectorConfig(
|
||||
type=TextSelectorType.PASSWORD,
|
||||
autocomplete="current-password",
|
||||
)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def validate_api(
|
||||
hass: HomeAssistant, user_input: dict[str, str]
|
||||
) -> tuple[str | None, str | None, dict[str, str]]:
|
||||
"""Validate the API connection."""
|
||||
errors: dict[str, str] = {}
|
||||
session = async_get_clientsession(hass)
|
||||
access_token = None
|
||||
unique_id = None
|
||||
try:
|
||||
api = await AirPatrolAPI.authenticate(
|
||||
session, user_input[CONF_EMAIL], user_input[CONF_PASSWORD]
|
||||
)
|
||||
except AirPatrolAuthenticationError:
|
||||
errors["base"] = "invalid_auth"
|
||||
except AirPatrolError:
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
access_token = api.get_access_token()
|
||||
unique_id = api.get_unique_id()
|
||||
|
||||
return (access_token, unique_id, errors)
|
||||
|
||||
|
||||
class AirPatrolConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for AirPatrol."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
access_token, unique_id, errors = await validate_api(self.hass, user_input)
|
||||
if access_token and unique_id:
|
||||
user_input[CONF_ACCESS_TOKEN] = access_token
|
||||
await self.async_set_unique_id(unique_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_EMAIL], data=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, user_input: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauthentication with new credentials."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauthentication confirmation."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input:
|
||||
access_token, unique_id, errors = await validate_api(self.hass, user_input)
|
||||
if access_token and unique_id:
|
||||
await self.async_set_unique_id(unique_id)
|
||||
self._abort_if_unique_id_mismatch()
|
||||
user_input[CONF_ACCESS_TOKEN] = access_token
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), data_updates=user_input
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm", data_schema=DATA_SCHEMA, errors=errors
|
||||
)
|
||||
@@ -1,16 +0,0 @@
|
||||
"""Constants for the AirPatrol integration."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from airpatrol.api import AirPatrolAuthenticationError, AirPatrolError
|
||||
|
||||
from homeassistant.const import Platform
|
||||
|
||||
DOMAIN = "airpatrol"
|
||||
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
PLATFORMS = [Platform.CLIMATE]
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
AIRPATROL_ERRORS = (AirPatrolAuthenticationError, AirPatrolError)
|
||||
@@ -1,100 +0,0 @@
|
||||
"""Data update coordinator for AirPatrol."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from airpatrol.api import AirPatrolAPI, AirPatrolAuthenticationError, AirPatrolError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_EMAIL, CONF_PASSWORD
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN, LOGGER, SCAN_INTERVAL
|
||||
|
||||
type AirPatrolConfigEntry = ConfigEntry[AirPatrolDataUpdateCoordinator]
|
||||
|
||||
|
||||
class AirPatrolDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]):
|
||||
"""Class to manage fetching AirPatrol data."""
|
||||
|
||||
config_entry: AirPatrolConfigEntry
|
||||
api: AirPatrolAPI
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config_entry: AirPatrolConfigEntry) -> None:
|
||||
"""Initialize."""
|
||||
|
||||
super().__init__(
|
||||
hass,
|
||||
LOGGER,
|
||||
name=f"{DOMAIN.capitalize()} {config_entry.title}",
|
||||
update_interval=SCAN_INTERVAL,
|
||||
config_entry=config_entry,
|
||||
)
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
try:
|
||||
await self._setup_client()
|
||||
except AirPatrolError as api_err:
|
||||
raise UpdateFailed(
|
||||
f"Error communicating with AirPatrol API: {api_err}"
|
||||
) from api_err
|
||||
|
||||
async def _async_update_data(self) -> dict[str, dict[str, Any]]:
|
||||
"""Update unit data from AirPatrol API."""
|
||||
return {unit_data["unit_id"]: unit_data for unit_data in await self._get_data()}
|
||||
|
||||
async def _get_data(self, retry: bool = False) -> list[dict[str, Any]]:
|
||||
"""Fetch data from API."""
|
||||
try:
|
||||
return await self.api.get_data()
|
||||
except AirPatrolAuthenticationError as auth_err:
|
||||
if retry:
|
||||
raise ConfigEntryAuthFailed(
|
||||
"Authentication with AirPatrol failed"
|
||||
) from auth_err
|
||||
await self._update_token()
|
||||
return await self._get_data(retry=True)
|
||||
except AirPatrolError as err:
|
||||
raise UpdateFailed(
|
||||
f"Error communicating with AirPatrol API: {err}"
|
||||
) from err
|
||||
|
||||
async def _update_token(self) -> None:
|
||||
"""Refresh the AirPatrol API client and update the access token."""
|
||||
session = async_get_clientsession(self.hass)
|
||||
try:
|
||||
self.api = await AirPatrolAPI.authenticate(
|
||||
session,
|
||||
self.config_entry.data[CONF_EMAIL],
|
||||
self.config_entry.data[CONF_PASSWORD],
|
||||
)
|
||||
except AirPatrolAuthenticationError as auth_err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
"Authentication with AirPatrol failed"
|
||||
) from auth_err
|
||||
|
||||
self.hass.config_entries.async_update_entry(
|
||||
self.config_entry,
|
||||
data={
|
||||
**self.config_entry.data,
|
||||
CONF_ACCESS_TOKEN: self.api.get_access_token(),
|
||||
},
|
||||
)
|
||||
|
||||
async def _setup_client(self) -> None:
|
||||
"""Set up the AirPatrol API client from stored access_token."""
|
||||
session = async_get_clientsession(self.hass)
|
||||
api = AirPatrolAPI(
|
||||
session,
|
||||
self.config_entry.data[CONF_ACCESS_TOKEN],
|
||||
self.config_entry.unique_id,
|
||||
)
|
||||
try:
|
||||
await api.get_data()
|
||||
except AirPatrolAuthenticationError:
|
||||
await self._update_token()
|
||||
self.api = api
|
||||
@@ -1,44 +0,0 @@
|
||||
"""Base entity for AirPatrol integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AirPatrolDataUpdateCoordinator
|
||||
|
||||
|
||||
class AirPatrolEntity(CoordinatorEntity[AirPatrolDataUpdateCoordinator]):
|
||||
"""Base entity for AirPatrol devices."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AirPatrolDataUpdateCoordinator,
|
||||
unit_id: str,
|
||||
) -> None:
|
||||
"""Initialize the AirPatrol entity."""
|
||||
super().__init__(coordinator)
|
||||
self._unit_id = unit_id
|
||||
device = coordinator.data[unit_id]
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, unit_id)},
|
||||
name=device["name"],
|
||||
manufacturer=device["manufacturer"],
|
||||
model=device["model"],
|
||||
serial_number=device["hwid"],
|
||||
)
|
||||
|
||||
@property
|
||||
def device_data(self) -> dict[str, Any]:
|
||||
"""Return the device data."""
|
||||
return self.coordinator.data[self._unit_id]
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return super().available and self._unit_id in self.coordinator.data
|
||||
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"domain": "airpatrol",
|
||||
"name": "AirPatrol",
|
||||
"codeowners": ["@antondalgren"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/airpatrol",
|
||||
"integration_type": "device",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["airpatrol==0.1.0"]
|
||||
}
|
||||
@@ -1,65 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup: done
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: Integration does not provide custom actions
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
Entities doesn't subscribe to events.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: done
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: todo
|
||||
discovery: todo
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: todo
|
||||
inject-websession: todo
|
||||
strict-typing: todo
|
||||
@@ -1,38 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"unique_id_mismatch": "Login credentials do not match the configured account"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
"email": "[%key:common::config_flow::data::email%]",
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"email": "[%key:component::airpatrol::config::step::user::data_description::email%]",
|
||||
"password": "[%key:component::airpatrol::config::step::user::data_description::password%]"
|
||||
},
|
||||
"description": "Reauthenticate with AirPatrol"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"email": "[%key:common::config_flow::data::email%]",
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"email": "Your AirPatrol email address",
|
||||
"password": "Your AirPatrol password"
|
||||
},
|
||||
"description": "Connect to AirPatrol"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -17,7 +17,6 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/airthings",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["airthings"],
|
||||
"requirements": ["airthings-cloud==0.2.0"]
|
||||
|
||||
@@ -27,7 +27,6 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/airthings_ble",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["airthings-ble==1.2.0"]
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@samsinnamon"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/airtouch4",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["airtouch4pyapi"],
|
||||
"requirements": ["airtouch4pyapi==1.0.5"]
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@danzel"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/airtouch5",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["airtouch5py"],
|
||||
"requirements": ["airtouch5py==0.3.0"]
|
||||
|
||||
@@ -9,8 +9,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/airzone",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioairzone"],
|
||||
"requirements": ["aioairzone==1.0.4"]
|
||||
"requirements": ["aioairzone==1.0.2"]
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@Noltari"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aioairzone_cloud"],
|
||||
"requirements": ["aioairzone-cloud==0.7.2"]
|
||||
|
||||
@@ -36,28 +36,5 @@
|
||||
"alarm_trigger": {
|
||||
"service": "mdi:bell-ring"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"armed": {
|
||||
"trigger": "mdi:shield"
|
||||
},
|
||||
"armed_away": {
|
||||
"trigger": "mdi:shield-lock"
|
||||
},
|
||||
"armed_home": {
|
||||
"trigger": "mdi:shield-home"
|
||||
},
|
||||
"armed_night": {
|
||||
"trigger": "mdi:shield-moon"
|
||||
},
|
||||
"armed_vacation": {
|
||||
"trigger": "mdi:shield-airplane"
|
||||
},
|
||||
"disarmed": {
|
||||
"trigger": "mdi:shield-off"
|
||||
},
|
||||
"triggered": {
|
||||
"trigger": "mdi:bell-ring"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
{
|
||||
"common": {
|
||||
"trigger_behavior_description": "The behavior of the targeted alarms to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"arm_away": "Arm {entity_name} away",
|
||||
@@ -75,15 +71,6 @@
|
||||
"message": "Arming requires a code but none was given for {entity_id}."
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"alarm_arm_away": {
|
||||
"description": "Arms the alarm in the away mode.",
|
||||
@@ -156,77 +143,5 @@
|
||||
"name": "Trigger"
|
||||
}
|
||||
},
|
||||
"title": "Alarm control panel",
|
||||
"triggers": {
|
||||
"armed": {
|
||||
"description": "Triggers after one or more alarms become armed, regardless of the mode.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm armed"
|
||||
},
|
||||
"armed_away": {
|
||||
"description": "Triggers after one or more alarms become armed in away mode.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm armed away"
|
||||
},
|
||||
"armed_home": {
|
||||
"description": "Triggers after one or more alarms become armed in home mode.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm armed home"
|
||||
},
|
||||
"armed_night": {
|
||||
"description": "Triggers after one or more alarms become armed in night mode.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm armed night"
|
||||
},
|
||||
"armed_vacation": {
|
||||
"description": "Triggers after one or more alarms become armed in vacation mode.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm armed vacation"
|
||||
},
|
||||
"disarmed": {
|
||||
"description": "Triggers after one or more alarms become disarmed.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm disarmed"
|
||||
},
|
||||
"triggered": {
|
||||
"description": "Triggers after one or more alarms become triggered.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm triggered"
|
||||
}
|
||||
}
|
||||
"title": "Alarm control panel"
|
||||
}
|
||||
|
||||
@@ -1,103 +0,0 @@
|
||||
"""Provides triggers for alarm control panels."""
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity import get_supported_features
|
||||
from homeassistant.helpers.trigger import (
|
||||
EntityTargetStateTriggerBase,
|
||||
Trigger,
|
||||
make_entity_target_state_trigger,
|
||||
make_entity_transition_trigger,
|
||||
)
|
||||
|
||||
from .const import DOMAIN, AlarmControlPanelEntityFeature, AlarmControlPanelState
|
||||
|
||||
|
||||
def supports_feature(hass: HomeAssistant, entity_id: str, features: int) -> bool:
|
||||
"""Get the device class of an entity or UNDEFINED if not found."""
|
||||
try:
|
||||
return bool(get_supported_features(hass, entity_id) & features)
|
||||
except HomeAssistantError:
|
||||
return False
|
||||
|
||||
|
||||
class EntityStateTriggerRequiredFeatures(EntityTargetStateTriggerBase):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_required_features: int
|
||||
|
||||
def entity_filter(self, entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain."""
|
||||
entities = super().entity_filter(entities)
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if supports_feature(self._hass, entity_id, self._required_features)
|
||||
}
|
||||
|
||||
|
||||
def make_entity_state_trigger_required_features(
|
||||
domain: str, to_state: str, required_features: int
|
||||
) -> type[EntityTargetStateTriggerBase]:
|
||||
"""Create an entity state trigger class."""
|
||||
|
||||
class CustomTrigger(EntityStateTriggerRequiredFeatures):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_domain = domain
|
||||
_to_state = to_state
|
||||
_required_features = required_features
|
||||
|
||||
return CustomTrigger
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"armed": make_entity_transition_trigger(
|
||||
DOMAIN,
|
||||
from_states={
|
||||
AlarmControlPanelState.ARMING,
|
||||
AlarmControlPanelState.DISARMED,
|
||||
AlarmControlPanelState.DISARMING,
|
||||
AlarmControlPanelState.PENDING,
|
||||
AlarmControlPanelState.TRIGGERED,
|
||||
},
|
||||
to_states={
|
||||
AlarmControlPanelState.ARMED_AWAY,
|
||||
AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
|
||||
AlarmControlPanelState.ARMED_HOME,
|
||||
AlarmControlPanelState.ARMED_NIGHT,
|
||||
AlarmControlPanelState.ARMED_VACATION,
|
||||
},
|
||||
),
|
||||
"armed_away": make_entity_state_trigger_required_features(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_AWAY,
|
||||
AlarmControlPanelEntityFeature.ARM_AWAY,
|
||||
),
|
||||
"armed_home": make_entity_state_trigger_required_features(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_HOME,
|
||||
AlarmControlPanelEntityFeature.ARM_HOME,
|
||||
),
|
||||
"armed_night": make_entity_state_trigger_required_features(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_NIGHT,
|
||||
AlarmControlPanelEntityFeature.ARM_NIGHT,
|
||||
),
|
||||
"armed_vacation": make_entity_state_trigger_required_features(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_VACATION,
|
||||
AlarmControlPanelEntityFeature.ARM_VACATION,
|
||||
),
|
||||
"disarmed": make_entity_target_state_trigger(
|
||||
DOMAIN, AlarmControlPanelState.DISARMED
|
||||
),
|
||||
"triggered": make_entity_target_state_trigger(
|
||||
DOMAIN, AlarmControlPanelState.TRIGGERED
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for alarm control panels."""
|
||||
return TRIGGERS
|
||||
@@ -1,53 +0,0 @@
|
||||
.trigger_common: &trigger_common
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
fields: &trigger_common_fields
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
translation_key: trigger_behavior
|
||||
|
||||
armed: *trigger_common
|
||||
|
||||
armed_away:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_AWAY
|
||||
|
||||
armed_home:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_HOME
|
||||
|
||||
armed_night:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_NIGHT
|
||||
|
||||
armed_vacation:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_VACATION
|
||||
|
||||
disarmed: *trigger_common
|
||||
|
||||
triggered: *trigger_common
|
||||
@@ -21,7 +21,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
|
||||
|
||||
from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN
|
||||
|
||||
SCAN_INTERVAL = 300
|
||||
SCAN_INTERVAL = 30
|
||||
|
||||
type AmazonConfigEntry = ConfigEntry[AmazonDevicesCoordinator]
|
||||
|
||||
@@ -45,7 +45,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
config_entry=entry,
|
||||
update_interval=timedelta(seconds=SCAN_INTERVAL),
|
||||
request_refresh_debouncer=Debouncer(
|
||||
hass, _LOGGER, cooldown=SCAN_INTERVAL, immediate=False
|
||||
hass, _LOGGER, cooldown=30, immediate=False
|
||||
),
|
||||
)
|
||||
self.api = AmazonEchoApi(
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==10.0.0"]
|
||||
"requirements": ["aioamazondevices==9.0.2"]
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@madpilot"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/amberelectric",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["amberelectric"],
|
||||
"requirements": ["amberelectric==2.0.12"]
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"codeowners": ["@engrbm87"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/android_ip_webcam",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["pydroid-ipcam==3.0.0"]
|
||||
}
|
||||
|
||||
@@ -1,73 +0,0 @@
|
||||
"""The Anglian Water integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from aiohttp import CookieJar
|
||||
from pyanglianwater import AnglianWater
|
||||
from pyanglianwater.auth import MSOB2CAuth
|
||||
from pyanglianwater.exceptions import (
|
||||
ExpiredAccessTokenError,
|
||||
SelfAssertedError,
|
||||
SmartMeterUnavailableError,
|
||||
)
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_ACCESS_TOKEN,
|
||||
CONF_PASSWORD,
|
||||
CONF_USERNAME,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
|
||||
from .const import CONF_ACCOUNT_NUMBER, DOMAIN
|
||||
from .coordinator import AnglianWaterConfigEntry, AnglianWaterUpdateCoordinator
|
||||
|
||||
_PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: AnglianWaterConfigEntry
|
||||
) -> bool:
|
||||
"""Set up Anglian Water from a config entry."""
|
||||
auth = MSOB2CAuth(
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
session=async_create_clientsession(
|
||||
hass,
|
||||
cookie_jar=CookieJar(quote_cookie=False),
|
||||
),
|
||||
refresh_token=entry.data[CONF_ACCESS_TOKEN],
|
||||
)
|
||||
try:
|
||||
await auth.send_refresh_request()
|
||||
except (ExpiredAccessTokenError, SelfAssertedError) as err:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
|
||||
_aw = AnglianWater(authenticator=auth)
|
||||
|
||||
try:
|
||||
await _aw.validate_smart_meter(entry.data[CONF_ACCOUNT_NUMBER])
|
||||
except SmartMeterUnavailableError as err:
|
||||
raise ConfigEntryError(
|
||||
translation_domain=DOMAIN, translation_key="smart_meter_unavailable"
|
||||
) from err
|
||||
|
||||
    hass.config_entries.async_update_entry(
        entry, data={**entry.data, CONF_ACCESS_TOKEN: auth.refresh_token}
    )
    entry.runtime_data = coordinator = AnglianWaterUpdateCoordinator(
        hass=hass, api=_aw, config_entry=entry
    )

    await coordinator.async_config_entry_first_refresh()
    await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
    return True


async def async_unload_entry(
    hass: HomeAssistant, entry: AnglianWaterConfigEntry
) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)

@@ -1,92 +0,0 @@
"""Config flow for the Anglian Water integration."""

from __future__ import annotations

import logging
from typing import Any

from aiohttp import CookieJar
from pyanglianwater import AnglianWater
from pyanglianwater.auth import MSOB2CAuth
from pyanglianwater.exceptions import (
    InvalidAccountIdError,
    SelfAssertedError,
    SmartMeterUnavailableError,
)
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import selector
from homeassistant.helpers.aiohttp_client import async_create_clientsession

from .const import CONF_ACCOUNT_NUMBER, DOMAIN

_LOGGER = logging.getLogger(__name__)

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_USERNAME): selector.TextSelector(),
        vol.Required(CONF_PASSWORD): selector.TextSelector(
            selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
        ),
        vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
    }
)


async def validate_credentials(
    auth: MSOB2CAuth, account_number: str
) -> str | MSOB2CAuth:
    """Validate the provided credentials."""
    try:
        await auth.send_login_request()
    except SelfAssertedError:
        return "invalid_auth"
    except Exception:
        _LOGGER.exception("Unexpected exception")
        return "unknown"
    _aw = AnglianWater(authenticator=auth)
    try:
        await _aw.validate_smart_meter(account_number)
    except (InvalidAccountIdError, SmartMeterUnavailableError):
        return "smart_meter_unavailable"
    return auth


class AnglianWaterConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Anglian Water."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            validation_response = await validate_credentials(
                MSOB2CAuth(
                    username=user_input[CONF_USERNAME],
                    password=user_input[CONF_PASSWORD],
                    session=async_create_clientsession(
                        self.hass,
                        cookie_jar=CookieJar(quote_cookie=False),
                    ),
                ),
                user_input[CONF_ACCOUNT_NUMBER],
            )
            if isinstance(validation_response, str):
                errors["base"] = validation_response
            else:
                await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER])
                self._abort_if_unique_id_configured()
                return self.async_create_entry(
                    title=user_input[CONF_ACCOUNT_NUMBER],
                    data={
                        **user_input,
                        CONF_ACCESS_TOKEN: validation_response.refresh_token,
                    },
                )

        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )
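For readers skimming the diff: the config flow above funnels all credential checking through validate_credentials(), which returns either an error key (a plain string that is surfaced via errors["base"] and strings.json) or the authenticated object itself, whose refresh_token is stored in the entry. The following is a minimal, self-contained sketch of that contract only; LoginRejected, FakeAuth and validate() are stand-ins invented for illustration and are not part of pyanglianwater or Home Assistant.

import asyncio


class LoginRejected(Exception):
    """Stand-in for an authentication failure raised by the client library."""


class FakeAuth:
    """Stand-in for the library's authenticator object."""

    def __init__(self, password: str) -> None:
        self.password = password
        self.refresh_token = "token-123"

    async def send_login_request(self) -> None:
        if self.password != "correct":
            raise LoginRejected


async def validate(auth: FakeAuth) -> str | FakeAuth:
    """Mirror the error-key-or-object contract used by the config flow."""
    try:
        await auth.send_login_request()
    except LoginRejected:
        return "invalid_auth"  # would become errors["base"] in the form
    return auth  # success: the caller reads auth.refresh_token


async def main() -> None:
    assert await validate(FakeAuth("wrong")) == "invalid_auth"
    ok = await validate(FakeAuth("correct"))
    assert not isinstance(ok, str) and ok.refresh_token == "token-123"


if __name__ == "__main__":
    asyncio.run(main())

Returning an error key instead of raising keeps the flow's form handling to a single isinstance() check, which is why both the login and smart-meter probes share one helper.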
@@ -1,4 +0,0 @@
"""Constants for the Anglian Water integration."""

DOMAIN = "anglian_water"
CONF_ACCOUNT_NUMBER = "account_number"

@@ -1,49 +0,0 @@
"""Anglian Water data coordinator."""

from __future__ import annotations

from datetime import timedelta
import logging

from pyanglianwater import AnglianWater
from pyanglianwater.exceptions import ExpiredAccessTokenError, UnknownEndpointError

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import CONF_ACCOUNT_NUMBER, DOMAIN

type AnglianWaterConfigEntry = ConfigEntry[AnglianWaterUpdateCoordinator]

_LOGGER = logging.getLogger(__name__)
UPDATE_INTERVAL = timedelta(minutes=60)


class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
    """Anglian Water data update coordinator."""

    config_entry: AnglianWaterConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        api: AnglianWater,
        config_entry: AnglianWaterConfigEntry,
    ) -> None:
        """Initialize update coordinator."""
        super().__init__(
            hass=hass,
            logger=_LOGGER,
            name=DOMAIN,
            update_interval=UPDATE_INTERVAL,
            config_entry=config_entry,
        )
        self.api = api

    async def _async_update_data(self) -> None:
        """Update data from Anglian Water's API."""
        try:
            return await self.api.update(self.config_entry.data[CONF_ACCOUNT_NUMBER])
        except (ExpiredAccessTokenError, UnknownEndpointError) as err:
            raise UpdateFailed from err
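The coordinator above leans on Home Assistant's DataUpdateCoordinator: it polls once per UPDATE_INTERVAL and converts library errors into UpdateFailed, which marks every attached entity unavailable until a later refresh succeeds. As a rough mental model only (not the real implementation), the core loop behaves like this self-contained sketch; PollError and fetch() are invented placeholders.

import asyncio
import random


class PollError(Exception):
    """Stand-in for ExpiredAccessTokenError / UnknownEndpointError."""


async def fetch() -> dict[str, float]:
    """Pretend API call; fails now and then like a flaky cloud endpoint."""
    if random.random() < 0.3:
        raise PollError("endpoint unavailable")
    return {"latest_read": 123.4}


async def coordinator_loop(interval: float, cycles: int) -> None:
    """Poll, publish on success, flag unavailable on failure (conceptual)."""
    for _ in range(cycles):
        try:
            data = await fetch()
        except PollError as err:
            # DataUpdateCoordinator would set last_update_success = False here.
            print(f"update failed ({err}); entities report unavailable")
        else:
            print(f"update ok, entities re-render with {data}")
        await asyncio.sleep(interval)


if __name__ == "__main__":
    asyncio.run(coordinator_loop(interval=0.1, cycles=5))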
@@ -1,48 +0,0 @@
"""Anglian Water entity."""

from __future__ import annotations

import logging

from pyanglianwater.meter import SmartMeter

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import AnglianWaterUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


class AnglianWaterEntity(CoordinatorEntity[AnglianWaterUpdateCoordinator]):
    """Defines a Anglian Water entity."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: AnglianWaterUpdateCoordinator,
        smart_meter: SmartMeter,
        key: str,
    ) -> None:
        """Initialize Anglian Water entity."""
        super().__init__(coordinator)
        self.smart_meter = smart_meter
        self._attr_unique_id = f"{smart_meter.serial_number}_{key}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, smart_meter.serial_number)},
            name=smart_meter.serial_number,
            manufacturer="Anglian Water",
            serial_number=smart_meter.serial_number,
        )

    async def async_added_to_hass(self) -> None:
        """When entity is loaded."""
        self.coordinator.api.updated_data_callbacks.append(self.async_write_ha_state)
        await super().async_added_to_hass()

    async def async_will_remove_from_hass(self) -> None:
        """When will be removed from HASS."""
        self.coordinator.api.updated_data_callbacks.remove(self.async_write_ha_state)
        await super().async_will_remove_from_hass()

@@ -1,12 +0,0 @@
{
  "domain": "anglian_water",
  "name": "Anglian Water",
  "codeowners": ["@pantherale0"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/anglian_water",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["pyanglianwater"],
  "quality_scale": "bronze",
  "requirements": ["pyanglianwater==3.1.0"]
}

@@ -1,83 +0,0 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      No custom actions are defined.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      No custom actions are defined.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup: done
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: |
      No custom actions are defined.
  config-entry-unloading: done
  docs-configuration-parameters: done
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: done
  reauthentication-flow: todo
  test-coverage: todo

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: |
      Unable to discover meters.
  discovery:
    status: exempt
    comment: |
      Unable to discover meters.
  docs-data-update: done
  docs-examples: todo
  docs-known-limitations: done
  docs-supported-devices: done
  docs-supported-functions: done
  docs-troubleshooting: done
  docs-use-cases: todo
  dynamic-devices: todo
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default:
    status: exempt
    comment: |
      No entities are disabled by default.
  entity-translations: done
  exception-translations: done
  icon-translations:
    status: exempt
    comment: |
      Entities do not require different icons.
  reconfiguration-flow: todo
  repair-issues:
    status: exempt
    comment: |
      Read-only integration and no repairs are possible.
  stale-devices: todo
  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: todo

@@ -1,117 +0,0 @@
"""Anglian Water sensor platform."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum

from pyanglianwater.meter import SmartMeter

from homeassistant.components.sensor import (
    EntityCategory,
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import UnitOfVolume
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import AnglianWaterConfigEntry, AnglianWaterUpdateCoordinator
from .entity import AnglianWaterEntity

PARALLEL_UPDATES = 0


class AnglianWaterSensor(StrEnum):
    """Store keys for Anglian Water sensors."""

    YESTERDAY_CONSUMPTION = "yesterday_consumption"
    YESTERDAY_WATER_COST = "yesterday_water_cost"
    YESTERDAY_SEWERAGE_COST = "yesterday_sewerage_cost"
    LATEST_READING = "latest_reading"


@dataclass(frozen=True, kw_only=True)
class AnglianWaterSensorEntityDescription(SensorEntityDescription):
    """Describes AnglianWater sensor entity."""

    value_fn: Callable[[SmartMeter], float]


ENTITY_DESCRIPTIONS: tuple[AnglianWaterSensorEntityDescription, ...] = (
    AnglianWaterSensorEntityDescription(
        key=AnglianWaterSensor.YESTERDAY_CONSUMPTION,
        native_unit_of_measurement=UnitOfVolume.LITERS,
        device_class=SensorDeviceClass.WATER,
        value_fn=lambda entity: entity.get_yesterday_consumption,
        state_class=SensorStateClass.TOTAL,
        translation_key=AnglianWaterSensor.YESTERDAY_CONSUMPTION,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    AnglianWaterSensorEntityDescription(
        key=AnglianWaterSensor.LATEST_READING,
        native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
        device_class=SensorDeviceClass.WATER,
        value_fn=lambda entity: entity.latest_read,
        state_class=SensorStateClass.TOTAL_INCREASING,
        translation_key=AnglianWaterSensor.LATEST_READING,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    AnglianWaterSensorEntityDescription(
        key=AnglianWaterSensor.YESTERDAY_WATER_COST,
        native_unit_of_measurement="GBP",
        device_class=SensorDeviceClass.MONETARY,
        value_fn=lambda entity: entity.yesterday_water_cost,
        translation_key=AnglianWaterSensor.YESTERDAY_WATER_COST,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    AnglianWaterSensorEntityDescription(
        key=AnglianWaterSensor.YESTERDAY_SEWERAGE_COST,
        native_unit_of_measurement="GBP",
        device_class=SensorDeviceClass.MONETARY,
        value_fn=lambda entity: entity.yesterday_sewerage_cost,
        translation_key=AnglianWaterSensor.YESTERDAY_SEWERAGE_COST,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AnglianWaterConfigEntry,
    async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the sensor platform."""
    async_add_devices(
        AnglianWaterSensorEntity(
            coordinator=entry.runtime_data,
            description=entity_description,
            smart_meter=smart_meter,
        )
        for entity_description in ENTITY_DESCRIPTIONS
        for smart_meter in entry.runtime_data.api.meters.values()
    )


class AnglianWaterSensorEntity(AnglianWaterEntity, SensorEntity):
    """Defines a Anglian Water sensor."""

    entity_description: AnglianWaterSensorEntityDescription

    def __init__(
        self,
        coordinator: AnglianWaterUpdateCoordinator,
        smart_meter: SmartMeter,
        description: AnglianWaterSensorEntityDescription,
    ) -> None:
        """Initialize Anglian Water sensor."""
        super().__init__(coordinator, smart_meter, description.key)
        self.entity_description = description

    @property
    def native_value(self) -> float | None:
        """Return the state of the sensor."""
        return self.entity_description.value_fn(self.smart_meter)
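The sensor platform above is fully declarative: each AnglianWaterSensorEntityDescription pairs a key with a value_fn, and a single generic entity class renders whichever description it was given, so adding a reading is one more entry in the tuple. The sketch below reproduces that pattern in plain Python so it can run outside Home Assistant; FakeMeter and the hypothetical "yesterday_total_cost" key are invented for illustration and are not part of the integration.

from collections.abc import Callable
from dataclasses import dataclass


@dataclass
class FakeMeter:
    """Stand-in for pyanglianwater's SmartMeter."""

    latest_read: float = 417.2
    yesterday_water_cost: float = 0.62
    yesterday_sewerage_cost: float = 0.48


@dataclass(frozen=True, kw_only=True)
class Description:
    """Mirror of the key + value_fn shape used above."""

    key: str
    value_fn: Callable[[FakeMeter], float]


DESCRIPTIONS: tuple[Description, ...] = (
    Description(key="latest_reading", value_fn=lambda m: m.latest_read),
    # Hypothetical extra sensor: combine the two cost readings.
    Description(
        key="yesterday_total_cost",
        value_fn=lambda m: m.yesterday_water_cost + m.yesterday_sewerage_cost,
    ),
)

if __name__ == "__main__":
    meter = FakeMeter()
    for description in DESCRIPTIONS:
        print(description.key, description.value_fn(meter))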
@@ -1,55 +0,0 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "smart_meter_unavailable": "This account does not have any smart meters associated with it. If this is unexpected, enter your Billing Account Number found at the top of your latest bill.",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "user": {
        "data": {
          "account_number": "Billing Account Number",
          "password": "[%key:common::config_flow::data::password%]",
          "username": "[%key:common::config_flow::data::username%]"
        },
        "data_description": {
          "account_number": "Your account number found on your latest bill.",
          "password": "Your password",
          "username": "Username or email used to log in to the Anglian Water website."
        },
        "description": "Enter your Anglian Water account credentials to connect to Home Assistant."
      }
    }
  },
  "entity": {
    "sensor": {
      "latest_reading": {
        "name": "Latest reading"
      },
      "yesterday_consumption": {
        "name": "Yesterday's usage"
      },
      "yesterday_sewerage_cost": {
        "name": "Yesterday's sewerage cost"
      },
      "yesterday_water_cost": {
        "name": "Yesterday's water cost"
      }
    }
  },
  "exceptions": {
    "auth_expired": {
      "message": "Authentication token expired"
    },
    "service_unavailable": {
      "message": "Anglian Water services are currently unavailable for maintenance."
    },
    "smart_meter_unavailable": {
      "message": "This account no longer has a smart meter associated with it."
    }
  }
}

@@ -4,7 +4,6 @@
  "codeowners": ["@Lash-L"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/anova",
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "loggers": ["anova_wifi"],
  "requirements": ["anova-wifi==0.17.0"]

@@ -4,7 +4,6 @@
  "codeowners": ["@hyralex"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/anthemav",
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["anthemav"],
  "requirements": ["anthemav==1.4.1"]

@@ -17,7 +17,13 @@ from homeassistant.helpers import (
)
from homeassistant.helpers.typing import ConfigType

from .const import DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER
from .const import (
    CONF_CHAT_MODEL,
    DEFAULT_CONVERSATION_NAME,
    DOMAIN,
    LOGGER,
    RECOMMENDED_CHAT_MODEL,
)

PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -37,7 +43,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
        partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
    )
    try:
        await client.models.list(timeout=10.0)
        # Use model from first conversation subentry for validation
        subentries = list(entry.subentries.values())
        if subentries:
            model_id = subentries[0].data.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
        else:
            model_id = RECOMMENDED_CHAT_MODEL
        model = await client.models.retrieve(model_id=model_id, timeout=10.0)
        LOGGER.debug("Anthropic model: %s", model.display_name)
    except anthropic.AuthenticationError as err:
        LOGGER.error("Invalid API key: %s", err)
        return False

@@ -6,7 +6,7 @@ from functools import partial
import json
import logging
import re
from typing import Any, cast
from typing import Any

import anthropic
import voluptuous as vol
@@ -54,11 +54,17 @@ from .const import (
    CONF_WEB_SEARCH_REGION,
    CONF_WEB_SEARCH_TIMEZONE,
    CONF_WEB_SEARCH_USER_LOCATION,
    DEFAULT,
    DEFAULT_AI_TASK_NAME,
    DEFAULT_CONVERSATION_NAME,
    DOMAIN,
    NON_THINKING_MODELS,
    RECOMMENDED_CHAT_MODEL,
    RECOMMENDED_MAX_TOKENS,
    RECOMMENDED_TEMPERATURE,
    RECOMMENDED_THINKING_BUDGET,
    RECOMMENDED_WEB_SEARCH,
    RECOMMENDED_WEB_SEARCH_MAX_USES,
    RECOMMENDED_WEB_SEARCH_USER_LOCATION,
    WEB_SEARCH_UNSUPPORTED_MODELS,
)

@@ -70,13 +76,13 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
    }
)

DEFAULT_CONVERSATION_OPTIONS = {
RECOMMENDED_CONVERSATION_OPTIONS = {
    CONF_RECOMMENDED: True,
    CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
    CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
}

DEFAULT_AI_TASK_OPTIONS = {
RECOMMENDED_AI_TASK_OPTIONS = {
    CONF_RECOMMENDED: True,
}

@@ -130,13 +136,13 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
            subentries=[
                {
                    "subentry_type": "conversation",
                    "data": DEFAULT_CONVERSATION_OPTIONS,
                    "data": RECOMMENDED_CONVERSATION_OPTIONS,
                    "title": DEFAULT_CONVERSATION_NAME,
                    "unique_id": None,
                },
                {
                    "subentry_type": "ai_task_data",
                    "data": DEFAULT_AI_TASK_OPTIONS,
                    "data": RECOMMENDED_AI_TASK_OPTIONS,
                    "title": DEFAULT_AI_TASK_NAME,
                    "unique_id": None,
                },
@@ -174,9 +180,9 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
    ) -> SubentryFlowResult:
        """Add a subentry."""
        if self._subentry_type == "ai_task_data":
            self.options = DEFAULT_AI_TASK_OPTIONS.copy()
            self.options = RECOMMENDED_AI_TASK_OPTIONS.copy()
        else:
            self.options = DEFAULT_CONVERSATION_OPTIONS.copy()
            self.options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
        return await self.async_step_init()

    async def async_step_reconfigure(
@@ -277,7 +283,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
        step_schema: VolDictType = {
            vol.Optional(
                CONF_CHAT_MODEL,
                default=DEFAULT[CONF_CHAT_MODEL],
                default=RECOMMENDED_CHAT_MODEL,
            ): SelectSelector(
                SelectSelectorConfig(
                    options=await self._get_model_list(), custom_value=True
@@ -285,11 +291,11 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
            ),
            vol.Optional(
                CONF_MAX_TOKENS,
                default=DEFAULT[CONF_MAX_TOKENS],
                default=RECOMMENDED_MAX_TOKENS,
            ): int,
            vol.Optional(
                CONF_TEMPERATURE,
                default=DEFAULT[CONF_TEMPERATURE],
                default=RECOMMENDED_TEMPERATURE,
            ): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
        }

@@ -319,14 +325,12 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):

        if not model.startswith(tuple(NON_THINKING_MODELS)):
            step_schema[
                vol.Optional(
                    CONF_THINKING_BUDGET, default=DEFAULT[CONF_THINKING_BUDGET]
                )
                vol.Optional(CONF_THINKING_BUDGET, default=RECOMMENDED_THINKING_BUDGET)
            ] = vol.All(
                NumberSelector(
                    NumberSelectorConfig(
                        min=0,
                        max=self.options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
                        max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
                    )
                ),
                vol.Coerce(int),
@@ -339,15 +343,15 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
                {
                    vol.Optional(
                        CONF_WEB_SEARCH,
                        default=DEFAULT[CONF_WEB_SEARCH],
                        default=RECOMMENDED_WEB_SEARCH,
                    ): bool,
                    vol.Optional(
                        CONF_WEB_SEARCH_MAX_USES,
                        default=DEFAULT[CONF_WEB_SEARCH_MAX_USES],
                        default=RECOMMENDED_WEB_SEARCH_MAX_USES,
                    ): int,
                    vol.Optional(
                        CONF_WEB_SEARCH_USER_LOCATION,
                        default=DEFAULT[CONF_WEB_SEARCH_USER_LOCATION],
                        default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
                    ): bool,
                }
            )
@@ -365,10 +369,9 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
            user_input = {}

        if user_input is not None:
            if user_input.get(CONF_WEB_SEARCH, DEFAULT[CONF_WEB_SEARCH]) and not errors:
            if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH) and not errors:
                if user_input.get(
                    CONF_WEB_SEARCH_USER_LOCATION,
                    DEFAULT[CONF_WEB_SEARCH_USER_LOCATION],
                    CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION
                ):
                    user_input.update(await self._get_location_data())

@@ -421,8 +424,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
            )
            if short_form.search(model_alias):
                model_alias += "-0"
            if model_alias.endswith(("haiku", "opus", "sonnet")):
                model_alias += "-latest"
            model_options.append(
                SelectOptionDict(
                    label=model_info.display_name,
@@ -455,7 +456,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
                }
            )
            response = await client.messages.create(
                model=cast(str, DEFAULT[CONF_CHAT_MODEL]),
                model=RECOMMENDED_CHAT_MODEL,
                messages=[
                    {
                        "role": "user",
@@ -470,7 +471,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
                        "content": "{",  # hints the model to skip any preamble
                    },
                ],
                max_tokens=cast(int, DEFAULT[CONF_MAX_TOKENS]),
                max_tokens=RECOMMENDED_MAX_TOKENS,
            )
            _LOGGER.debug("Model response: %s", response.content)
            location_data = location_schema(
@@ -11,29 +11,25 @@ DEFAULT_AI_TASK_NAME = "Claude AI Task"
CONF_RECOMMENDED = "recommended"
CONF_PROMPT = "prompt"
CONF_CHAT_MODEL = "chat_model"
RECOMMENDED_CHAT_MODEL = "claude-3-5-haiku-latest"
CONF_MAX_TOKENS = "max_tokens"
RECOMMENDED_MAX_TOKENS = 3000
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_THINKING_BUDGET = "thinking_budget"
RECOMMENDED_THINKING_BUDGET = 0
MIN_THINKING_BUDGET = 1024
CONF_WEB_SEARCH = "web_search"
RECOMMENDED_WEB_SEARCH = False
CONF_WEB_SEARCH_USER_LOCATION = "user_location"
RECOMMENDED_WEB_SEARCH_USER_LOCATION = False
CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses"
RECOMMENDED_WEB_SEARCH_MAX_USES = 5
CONF_WEB_SEARCH_CITY = "city"
CONF_WEB_SEARCH_REGION = "region"
CONF_WEB_SEARCH_COUNTRY = "country"
CONF_WEB_SEARCH_TIMEZONE = "timezone"

DEFAULT = {
    CONF_CHAT_MODEL: "claude-3-5-haiku-latest",
    CONF_MAX_TOKENS: 3000,
    CONF_TEMPERATURE: 1.0,
    CONF_THINKING_BUDGET: 0,
    CONF_WEB_SEARCH: False,
    CONF_WEB_SEARCH_USER_LOCATION: False,
    CONF_WEB_SEARCH_MAX_USES: 5,
}

MIN_THINKING_BUDGET = 1024

NON_THINKING_MODELS = [
    "claude-3-5",  # Both sonnet and haiku
    "claude-3-opus",
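One recurring change across the Anthropic hunks above is swapping per-option RECOMMENDED_* fallback constants for a single DEFAULT mapping keyed by the CONF_* option names, which is then read everywhere as options.get(key, DEFAULT[key]). A tiny self-contained sketch of the two styles is below; the option values are copied from the constants shown above, while effective_option() is a helper name of my own, not part of the integration.

CONF_CHAT_MODEL = "chat_model"
CONF_MAX_TOKENS = "max_tokens"

# Style A: one fallback constant per option.
RECOMMENDED_CHAT_MODEL = "claude-3-5-haiku-latest"
RECOMMENDED_MAX_TOKENS = 3000

# Style B: one mapping, so new options and their defaults live in one place.
DEFAULT = {CONF_CHAT_MODEL: "claude-3-5-haiku-latest", CONF_MAX_TOKENS: 3000}


def effective_option(options: dict, key: str) -> object:
    """Resolve a user option using the DEFAULT-dict fallback style."""
    return options.get(key, DEFAULT[key])


if __name__ == "__main__":
    user_options = {CONF_MAX_TOKENS: 1024}
    assert effective_option(user_options, CONF_MAX_TOKENS) == 1024
    assert effective_option(user_options, CONF_CHAT_MODEL) == RECOMMENDED_CHAT_MODEL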
@@ -84,11 +84,14 @@ from .const import (
    CONF_WEB_SEARCH_REGION,
    CONF_WEB_SEARCH_TIMEZONE,
    CONF_WEB_SEARCH_USER_LOCATION,
    DEFAULT,
    DOMAIN,
    LOGGER,
    MIN_THINKING_BUDGET,
    NON_THINKING_MODELS,
    RECOMMENDED_CHAT_MODEL,
    RECOMMENDED_MAX_TOKENS,
    RECOMMENDED_TEMPERATURE,
    RECOMMENDED_THINKING_BUDGET,
)

# Max number of back and forth with the LLM to generate a response
@@ -583,7 +586,7 @@ class AnthropicBaseLLMEntity(Entity):
            identifiers={(DOMAIN, subentry.subentry_id)},
            name=subentry.title,
            manufacturer="Anthropic",
            model=subentry.data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL]),
            model="Claude",
            entry_type=dr.DeviceEntryType.SERVICE,
        )

@@ -601,19 +604,17 @@ class AnthropicBaseLLMEntity(Entity):
            raise TypeError("First message must be a system message")
        messages = _convert_content(chat_log.content[1:])

        model = options.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
        model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)

        model_args = MessageCreateParamsStreaming(
            model=model,
            messages=messages,
            max_tokens=options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
            max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
            system=system.content,
            stream=True,
        )

        thinking_budget = options.get(
            CONF_THINKING_BUDGET, DEFAULT[CONF_THINKING_BUDGET]
        )
        thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
        if (
            not model.startswith(tuple(NON_THINKING_MODELS))
            and thinking_budget >= MIN_THINKING_BUDGET
@@ -624,7 +625,7 @@ class AnthropicBaseLLMEntity(Entity):
        else:
            model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
        model_args["temperature"] = options.get(
            CONF_TEMPERATURE, DEFAULT[CONF_TEMPERATURE]
            CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
        )

        tools: list[ToolUnionParam] = []

@@ -8,5 +8,5 @@
  "documentation": "https://www.home-assistant.io/integrations/anthropic",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "requirements": ["anthropic==0.75.0"]
  "requirements": ["anthropic==0.73.0"]
}

@@ -4,7 +4,6 @@
  "codeowners": ["@bdr99"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/aosmith",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "requirements": ["py-aosmith==1.0.15"]
}

@@ -4,7 +4,6 @@
  "codeowners": ["@yuxincs"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/apcupsd",
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["apcaccess"],
  "quality_scale": "platinum",

@@ -5,7 +5,6 @@
  "config_flow": true,
  "dependencies": ["zeroconf"],
  "documentation": "https://www.home-assistant.io/integrations/apple_tv",
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["pyatv", "srptools"],
  "requirements": ["pyatv==0.16.1;python_version<'3.14'"],

@@ -4,7 +4,6 @@
  "codeowners": ["@elupus"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/arcam_fmj",
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["arcam"],
  "requirements": ["arcam-fmj==1.8.2"],

@@ -4,7 +4,6 @@
  "codeowners": ["@ikalnyi"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/arve",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "requirements": ["asyncarve==0.1.1"]
}

@@ -4,7 +4,6 @@
  "codeowners": ["@milanmeu"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/aseko_pool_live",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["aioaseko"],
  "requirements": ["aioaseko==1.0.0"]

@@ -3,9 +3,8 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
import logging
import math

from pysilero_vad import SileroVoiceActivityDetector
from pymicro_vad import MicroVad
from pyspeex_noise import AudioProcessor

from .const import BYTES_PER_CHUNK
@@ -43,8 +42,8 @@ class AudioEnhancer(ABC):
        """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples."""


class SileroVadSpeexEnhancer(AudioEnhancer):
    """Audio enhancer that runs Silero VAD and speex."""
class MicroVadSpeexEnhancer(AudioEnhancer):
    """Audio enhancer that runs microVAD and speex."""

    def __init__(
        self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool
@@ -70,49 +69,21 @@ class SileroVadSpeexEnhancer(AudioEnhancer):
            self.noise_suppression,
        )

        self.vad: SileroVoiceActivityDetector | None = None

        # We get 10ms chunks but Silero works on 32ms chunks, so we have to
        # buffer audio. The previous speech probability is used until enough
        # audio has been buffered.
        self._vad_buffer: bytearray | None = None
        self._vad_buffer_chunks = 0
        self._vad_buffer_chunk_idx = 0
        self._last_speech_probability: float | None = None
        self.vad: MicroVad | None = None

        if self.is_vad_enabled:
            self.vad = SileroVoiceActivityDetector()

            # VAD buffer is a multiple of 10ms, but Silero VAD needs 32ms.
            self._vad_buffer_chunks = int(
                math.ceil(self.vad.chunk_bytes() / BYTES_PER_CHUNK)
            )
            self._vad_leftover_bytes = self.vad.chunk_bytes() - BYTES_PER_CHUNK
            self._vad_buffer = bytearray(self.vad.chunk_bytes())
            _LOGGER.debug("Initialized Silero VAD")
            self.vad = MicroVad()
            _LOGGER.debug("Initialized microVAD")

    def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk:
        """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples."""
        speech_probability: float | None = None

        assert len(audio) == BYTES_PER_CHUNK

        if self.vad is not None:
            # Run VAD
            assert self._vad_buffer is not None
            start_idx = self._vad_buffer_chunk_idx * BYTES_PER_CHUNK
            self._vad_buffer[start_idx : start_idx + BYTES_PER_CHUNK] = audio

            self._vad_buffer_chunk_idx += 1
            if self._vad_buffer_chunk_idx >= self._vad_buffer_chunks:
                # We have enough data to run Silero VAD (32 ms)
                self._last_speech_probability = self.vad.process_chunk(
                    self._vad_buffer[: self.vad.chunk_bytes()]
                )

                # Copy leftover audio that wasn't processed to start
                self._vad_buffer[: self._vad_leftover_bytes] = self._vad_buffer[
                    -self._vad_leftover_bytes :
                ]
                self._vad_buffer_chunk_idx = 0
            speech_probability = self.vad.Process10ms(audio)

        if self.audio_processor is not None:
            # Run noise suppression and auto gain
@@ -121,5 +92,5 @@ class SileroVadSpeexEnhancer(AudioEnhancer):
        return EnhancedAudioChunk(
            audio=audio,
            timestamp_ms=timestamp_ms,
            speech_probability=self._last_speech_probability,
            speech_probability=speech_probability,
        )
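The Silero branch of audio_enhancer.py buffers audio because the pipeline delivers 10 ms chunks while Silero consumes larger fixed-size frames, whereas microVAD's Process10ms() accepts the 10 ms chunks directly, which is why the buffering code appears on only one side of the diff. The arithmetic behind that buffering, assuming the 16 kHz, 16-bit mono format stated in the docstrings and a 512-sample Silero frame (the real value comes from vad.chunk_bytes() and may differ), works out as in this small sketch.

import math

SAMPLE_RATE = 16_000  # Hz, per the "PCM audio @ 16Khz" docstrings above
SAMPLE_WIDTH = 2  # bytes per sample, 16-bit mono
CHUNK_MS = 10

BYTES_PER_CHUNK = SAMPLE_RATE * SAMPLE_WIDTH * CHUNK_MS // 1000  # 320 bytes
SILERO_FRAME_SAMPLES = 512  # assumed typical Silero frame at 16 kHz
silero_frame_bytes = SILERO_FRAME_SAMPLES * SAMPLE_WIDTH  # 1024 bytes, i.e. 32 ms

# Mirrors the buffering code: collect ceil(chunk_bytes / BYTES_PER_CHUNK) pipeline
# chunks before each Silero call, reusing the previous probability in between.
chunks_to_buffer = math.ceil(silero_frame_bytes / BYTES_PER_CHUNK)

if __name__ == "__main__":
    print(f"pipeline chunk: {BYTES_PER_CHUNK} bytes ({CHUNK_MS} ms)")
    print(f"Silero frame:   {silero_frame_bytes} bytes "
          f"({SILERO_FRAME_SAMPLES / SAMPLE_RATE * 1000:.0f} ms)")
    print(f"10 ms chunks buffered per Silero call: {chunks_to_buffer}")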
@@ -8,5 +8,5 @@
  "integration_type": "system",
  "iot_class": "local_push",
  "quality_scale": "internal",
  "requirements": ["pysilero-vad==3.0.1", "pyspeex-noise==1.0.2"]
  "requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.2"]
}

@@ -55,7 +55,7 @@ from homeassistant.util import (
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.limited_size_dict import LimitedSizeDict

from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, SileroVadSpeexEnhancer
from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
from .const import (
    ACKNOWLEDGE_PATH,
    BYTES_PER_CHUNK,
@@ -633,7 +633,7 @@ class PipelineRun:
        # Initialize with audio settings
        if self.audio_settings.needs_processor and (self.audio_enhancer is None):
            # Default audio enhancer
            self.audio_enhancer = SileroVadSpeexEnhancer(
            self.audio_enhancer = MicroVadSpeexEnhancer(
                self.audio_settings.auto_gain_dbfs,
                self.audio_settings.noise_suppression_level,
                self.audio_settings.is_vad_enabled,
@@ -1123,6 +1123,63 @@ class PipelineRun:
        )

        try:
            user_input = conversation.ConversationInput(
                text=intent_input,
                context=self.context,
                conversation_id=conversation_id,
                device_id=self._device_id,
                satellite_id=self._satellite_id,
                language=input_language,
                agent_id=self.intent_agent.id,
                extra_system_prompt=conversation_extra_system_prompt,
            )

            agent_id = self.intent_agent.id
            processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
            all_targets_in_satellite_area = False
            intent_response: intent.IntentResponse | None = None
            if not processed_locally and not self._intent_agent_only:
                # Sentence triggers override conversation agent
                if (
                    trigger_response_text
                    := await conversation.async_handle_sentence_triggers(
                        self.hass, user_input
                    )
                ) is not None:
                    # Sentence trigger matched
                    agent_id = "sentence_trigger"
                    processed_locally = True
                    intent_response = intent.IntentResponse(
                        self.pipeline.conversation_language
                    )
                    intent_response.async_set_speech(trigger_response_text)

                intent_filter: Callable[[RecognizeResult], bool] | None = None
                # If the LLM has API access, we filter out some sentences that are
                # interfering with LLM operation.
                if (
                    intent_agent_state := self.hass.states.get(self.intent_agent.id)
                ) and intent_agent_state.attributes.get(
                    ATTR_SUPPORTED_FEATURES, 0
                ) & conversation.ConversationEntityFeature.CONTROL:
                    intent_filter = _async_local_fallback_intent_filter

                # Try local intents
                if (
                    intent_response is None
                    and self.pipeline.prefer_local_intents
                    and (
                        intent_response := await conversation.async_handle_intents(
                            self.hass,
                            user_input,
                            intent_filter=intent_filter,
                        )
                    )
                ):
                    # Local intent matched
                    agent_id = conversation.HOME_ASSISTANT_AGENT
                    processed_locally = True

            if self.tts_stream and self.tts_stream.supports_streaming_input:
                tts_input_stream: asyncio.Queue[str | None] | None = asyncio.Queue()
            else:
@@ -1208,17 +1265,6 @@ class PipelineRun:
            assert self.tts_stream is not None
            self.tts_stream.async_set_message_stream(tts_input_stream_generator())

            user_input = conversation.ConversationInput(
                text=intent_input,
                context=self.context,
                conversation_id=conversation_id,
                device_id=self._device_id,
                satellite_id=self._satellite_id,
                language=input_language,
                agent_id=self.intent_agent.id,
                extra_system_prompt=conversation_extra_system_prompt,
            )

            with (
                chat_session.async_get_chat_session(
                    self.hass, user_input.conversation_id
@@ -1230,53 +1276,6 @@ class PipelineRun:
                    chat_log_delta_listener=chat_log_delta_listener,
                ) as chat_log,
            ):
                agent_id = self.intent_agent.id
                processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
                all_targets_in_satellite_area = False
                intent_response: intent.IntentResponse | None = None
                if not processed_locally and not self._intent_agent_only:
                    # Sentence triggers override conversation agent
                    if (
                        trigger_response_text
                        := await conversation.async_handle_sentence_triggers(
                            self.hass, user_input, chat_log
                        )
                    ) is not None:
                        # Sentence trigger matched
                        agent_id = "sentence_trigger"
                        processed_locally = True
                        intent_response = intent.IntentResponse(
                            self.pipeline.conversation_language
                        )
                        intent_response.async_set_speech(trigger_response_text)

                    intent_filter: Callable[[RecognizeResult], bool] | None = None
                    # If the LLM has API access, we filter out some sentences that are
                    # interfering with LLM operation.
                    if (
                        intent_agent_state := self.hass.states.get(self.intent_agent.id)
                    ) and intent_agent_state.attributes.get(
                        ATTR_SUPPORTED_FEATURES, 0
                    ) & conversation.ConversationEntityFeature.CONTROL:
                        intent_filter = _async_local_fallback_intent_filter

                    # Try local intents
                    if (
                        intent_response is None
                        and self.pipeline.prefer_local_intents
                        and (
                            intent_response := await conversation.async_handle_intents(
                                self.hass,
                                user_input,
                                chat_log,
                                intent_filter=intent_filter,
                            )
                        )
                    ):
                        # Local intent matched
                        agent_id = conversation.HOME_ASSISTANT_AGENT
                        processed_locally = True

                # It was already handled, create response and add to chat history
                if intent_response is not None:
                    speech: str = intent_response.speech.get("plain", {}).get(
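The pipeline.py hunks above mostly relocate code: building ConversationInput and running the sentence-trigger and local-intent checks moves relative to where the chat session is opened, but the precedence itself does not change. A custom sentence trigger wins first, then local intents when the pipeline prefers them, and only then the configured conversation agent. Below is a compact, self-contained sketch of that ordering; the stub matchers are illustrative stand-ins, not Home Assistant APIs.

import asyncio


async def match_sentence_trigger(text: str) -> str | None:
    """Stand-in for conversation.async_handle_sentence_triggers()."""
    return "Garden lights on!" if text == "garden time" else None


async def match_local_intent(text: str) -> str | None:
    """Stand-in for conversation.async_handle_intents()."""
    return "Turned on the lights." if "lights" in text else None


async def route(text: str, prefer_local_intents: bool) -> tuple[str, str]:
    """Return (agent, response) using the same precedence as PipelineRun."""
    if (response := await match_sentence_trigger(text)) is not None:
        return "sentence_trigger", response
    if prefer_local_intents and (response := await match_local_intent(text)) is not None:
        return "home_assistant", response
    return "conversation_agent", "(forwarded to the configured LLM agent)"


if __name__ == "__main__":
    for phrase in ("garden time", "turn on the lights", "tell me a joke"):
        print(phrase, "->", asyncio.run(route(phrase, prefer_local_intents=True)))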
@@ -14,19 +14,5 @@
    "start_conversation": {
      "service": "mdi:forum"
    }
  },
  "triggers": {
    "idle": {
      "trigger": "mdi:chat-sleep"
    },
    "listening": {
      "trigger": "mdi:chat-question"
    },
    "processing": {
      "trigger": "mdi:chat-processing"
    },
    "responding": {
      "trigger": "mdi:chat-alert"
    }
  }
}

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/assist_satellite",
  "integration_type": "entity",
  "quality_scale": "internal",
  "requirements": ["hassil==3.5.0"]
  "requirements": ["hassil==3.4.0"]
}

@@ -1,8 +1,4 @@
{
  "common": {
    "trigger_behavior_description": "The behavior of the targeted Assist satellites to trigger on.",
    "trigger_behavior_name": "Behavior"
  },
  "entity_component": {
    "_": {
      "name": "Assist satellite",
@@ -20,13 +16,6 @@
        "id": "Answer ID",
        "sentences": "Sentences"
      }
    },
    "trigger_behavior": {
      "options": {
        "any": "Any",
        "first": "First",
        "last": "Last"
      }
    }
  },
  "services": {
@@ -109,47 +98,5 @@
      "name": "Start conversation"
    }
  },
  "title": "Assist satellite",
  "triggers": {
    "idle": {
      "description": "Triggers after one or more voice assistant satellites become idle after having processed a command.",
      "fields": {
        "behavior": {
          "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
        }
      },
      "name": "Satellite became idle"
    },
    "listening": {
      "description": "Triggers after one or more voice assistant satellites start listening for a command from someone.",
      "fields": {
        "behavior": {
          "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
        }
      },
      "name": "Satellite started listening"
    },
    "processing": {
      "description": "Triggers after one or more voice assistant satellites start processing a command after having heard it.",
      "fields": {
        "behavior": {
          "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
        }
      },
      "name": "Satellite started processing"
    },
    "responding": {
      "description": "Triggers after one or more voice assistant satellites start responding to a command after having processed it, or start announcing something.",
      "fields": {
        "behavior": {
          "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
          "name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
        }
      },
      "name": "Satellite started responding"
    }
  }
  "title": "Assist satellite"
}

@@ -1,25 +0,0 @@
"""Provides triggers for assist satellites."""

from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_target_state_trigger

from .const import DOMAIN
from .entity import AssistSatelliteState

TRIGGERS: dict[str, type[Trigger]] = {
    "idle": make_entity_target_state_trigger(DOMAIN, AssistSatelliteState.IDLE),
    "listening": make_entity_target_state_trigger(
        DOMAIN, AssistSatelliteState.LISTENING
    ),
    "processing": make_entity_target_state_trigger(
        DOMAIN, AssistSatelliteState.PROCESSING
    ),
    "responding": make_entity_target_state_trigger(
        DOMAIN, AssistSatelliteState.RESPONDING
    ),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for assist satellites."""
    return TRIGGERS

@@ -1,20 +0,0 @@
.trigger_common: &trigger_common
  target:
    entity:
      domain: assist_satellite
  fields:
    behavior:
      required: true
      default: any
      selector:
        select:
          options:
            - first
            - last
            - any
          translation_key: trigger_behavior

idle: *trigger_common
listening: *trigger_common
processing: *trigger_common
responding: *trigger_common

@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
  "requirements": ["aioasuswrt==1.5.4", "asusrouter==1.21.3"]
  "requirements": ["aioasuswrt==1.5.1", "asusrouter==1.21.0"]
}

@@ -4,7 +4,6 @@
  "codeowners": ["@MatsNL"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/atag",
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["pyatag"],
  "requirements": ["pyatag==0.3.5.3"]

@@ -27,8 +27,7 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/august",
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "loggers": ["pubnub", "yalexs"],
  "requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.2"]
  "requirements": ["yalexs==9.2.0", "yalexs-ble==3.1.2"]
}

@@ -4,7 +4,6 @@
  "codeowners": ["@djtimca"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/aurora",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["auroranoaa"],
  "requirements": ["auroranoaa==0.0.5"]
Some files were not shown because too many files have changed in this diff.