Franck Nijhof 2024-12-04 19:58:02 +01:00 committed by GitHub
commit 2b40844171
2136 changed files with 57808 additions and 16221 deletions

View File

@ -79,6 +79,7 @@ components: &components
- homeassistant/components/group/**
- homeassistant/components/hassio/**
- homeassistant/components/homeassistant/**
- homeassistant/components/homeassistant_hardware/**
- homeassistant/components/http/**
- homeassistant/components/image/**
- homeassistant/components/input_boolean/**

View File

@ -10,7 +10,7 @@ on:
env:
BUILD_TYPE: core
DEFAULT_PYTHON: "3.12"
DEFAULT_PYTHON: "3.13"
PIP_TIMEOUT: 60
UV_HTTP_TIMEOUT: 60
UV_SYSTEM_PYTHON: "true"
@ -509,7 +509,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@ -522,7 +522,7 @@ jobs:
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@ -531,7 +531,7 @@ jobs:
- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}

View File

@ -40,9 +40,9 @@ env:
CACHE_VERSION: 11
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 9
HA_SHORT_VERSION: "2024.11"
HA_SHORT_VERSION: "2024.12"
DEFAULT_PYTHON: "3.12"
ALL_PYTHON_VERSIONS: "['3.12']"
ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
# 10.3 is the oldest supported version
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
# 10.6 is the current long-term-support
@ -622,13 +622,13 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.3.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.1.2
with:
@ -819,11 +819,7 @@ jobs:
needs:
- info
- base
strategy:
fail-fast: false
matrix:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
name: Split tests for full run Python ${{ matrix.python-version }}
name: Split tests for full run
steps:
- name: Install additional OS dependencies
run: |
@ -836,11 +832,11 @@ jobs:
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.3.0
with:
python-version: ${{ matrix.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
@ -858,7 +854,7 @@ jobs:
- name: Upload pytest_buckets
uses: actions/upload-artifact@v4.4.3
with:
name: pytest_buckets-${{ matrix.python-version }}
name: pytest_buckets
path: pytest_buckets.txt
overwrite: true
@ -923,7 +919,7 @@ jobs:
- name: Download pytest_buckets
uses: actions/download-artifact@v4.1.8
with:
name: pytest_buckets-${{ matrix.python-version }}
name: pytest_buckets
- name: Compile English translations
run: |
. venv/bin/activate
@ -949,6 +945,7 @@ jobs:
--timeout=9 \
--durations=10 \
--numprocesses auto \
--snapshot-details \
--dist=loadfile \
${cov_params[@]} \
-o console_output_style=count \
@ -1071,6 +1068,7 @@ jobs:
-qq \
--timeout=20 \
--numprocesses 1 \
--snapshot-details \
${cov_params[@]} \
-o console_output_style=count \
--durations=10 \
@ -1199,6 +1197,7 @@ jobs:
-qq \
--timeout=9 \
--numprocesses 1 \
--snapshot-details \
${cov_params[@]} \
-o console_output_style=count \
--durations=0 \
@ -1249,12 +1248,11 @@ jobs:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'true'
uses: codecov/codecov-action@v4.6.0
uses: codecov/codecov-action@v5.0.7
with:
fail_ci_if_error: true
flags: full-suite
token: ${{ secrets.CODECOV_TOKEN }}
version: v0.6.0
pytest-partial:
runs-on: ubuntu-24.04
@ -1345,6 +1343,7 @@ jobs:
-qq \
--timeout=9 \
--numprocesses auto \
--snapshot-details \
${cov_params[@]} \
-o console_output_style=count \
--durations=0 \
@ -1387,8 +1386,7 @@ jobs:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'false'
uses: codecov/codecov-action@v4.6.0
uses: codecov/codecov-action@v5.0.7
with:
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}
version: v0.6.0

View File

@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.27.0
uses: github/codeql-action/init@v3.27.5
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.27.0
uses: github/codeql-action/analyze@v3.27.5
with:
category: "/language:python"

View File

@ -112,7 +112,7 @@ jobs:
strategy:
fail-fast: false
matrix:
abi: ["cp312"]
abi: ["cp312", "cp313"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
@ -135,15 +135,15 @@ jobs:
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.11.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "libffi-dev;openssl-dev;yaml-dev;nasm"
skip-binary: aiohttp;multidict;yarl
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements.txt"
@ -156,7 +156,7 @@ jobs:
strategy:
fail-fast: false
matrix:
abi: ["cp312"]
abi: ["cp312", "cp313"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
@ -198,6 +198,7 @@ jobs:
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
- name: Create requirements for cython<3
if: matrix.abi == 'cp312'
run: |
# Some dependencies still require 'cython<3'
# and don't yet use isolated build environments.
@ -208,7 +209,8 @@ jobs:
cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt
- name: Build wheels (old cython)
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.11.0
if: matrix.abi == 'cp312'
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@ -223,43 +225,43 @@ jobs:
pip: "'cython<3'"
- name: Build wheels (part 1)
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.11.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtaa"
- name: Build wheels (part 2)
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.11.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtab"
- name: Build wheels (part 3)
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.11.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtac"

View File

@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.1
rev: v0.8.0
hooks:
- id: ruff
args:
@ -18,7 +18,7 @@ repos:
exclude_types: [csv, json, html]
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v5.0.0
hooks:
- id: check-executables-have-shebangs
stages: [manual]
@ -83,7 +83,7 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(quality_scale)\.yaml|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
- id: hassfest-metadata
name: hassfest-metadata
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker
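For instance, a path such as homeassistant/components/acaia/quality_scale.yaml (added elsewhere in this commit) now matches the hassfest hook's extended files pattern above.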

View File

@ -324,11 +324,13 @@ homeassistant.components.moon.*
homeassistant.components.mopeka.*
homeassistant.components.motionmount.*
homeassistant.components.mqtt.*
homeassistant.components.music_assistant.*
homeassistant.components.my.*
homeassistant.components.mysensors.*
homeassistant.components.myuplink.*
homeassistant.components.nam.*
homeassistant.components.nanoleaf.*
homeassistant.components.nasweb.*
homeassistant.components.neato.*
homeassistant.components.nest.*
homeassistant.components.netatmo.*
@ -338,6 +340,7 @@ homeassistant.components.nfandroidtv.*
homeassistant.components.nightscout.*
homeassistant.components.nissan_leaf.*
homeassistant.components.no_ip.*
homeassistant.components.nordpool.*
homeassistant.components.notify.*
homeassistant.components.notion.*
homeassistant.components.number.*
@ -382,6 +385,7 @@ homeassistant.components.recollect_waste.*
homeassistant.components.recorder.*
homeassistant.components.remote.*
homeassistant.components.renault.*
homeassistant.components.reolink.*
homeassistant.components.repairs.*
homeassistant.components.rest.*
homeassistant.components.rest_command.*
@ -434,6 +438,7 @@ homeassistant.components.starlink.*
homeassistant.components.statistics.*
homeassistant.components.steamist.*
homeassistant.components.stookalert.*
homeassistant.components.stookwijzer.*
homeassistant.components.stream.*
homeassistant.components.streamlabswater.*
homeassistant.components.stt.*

.vscode/tasks.json vendored
View File

@ -87,6 +87,22 @@
},
"problemMatcher": []
},
{
"label": "Update syrupy snapshots",
"detail": "Update syrupy snapshots for a given integration.",
"type": "shell",
"command": "python3 -m pytest ./tests/components/${input:integrationName} --snapshot-update",
"dependsOn": ["Compile English translations"],
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Generate Requirements",
"type": "shell",

View File

@ -40,6 +40,8 @@ build.json @home-assistant/supervisor
# Integrations
/homeassistant/components/abode/ @shred86
/tests/components/abode/ @shred86
/homeassistant/components/acaia/ @zweckj
/tests/components/acaia/ @zweckj
/homeassistant/components/accuweather/ @bieniu
/tests/components/accuweather/ @bieniu
/homeassistant/components/acmeda/ @atmurray
@ -496,8 +498,8 @@ build.json @home-assistant/supervisor
/tests/components/freebox/ @hacf-fr @Quentame
/homeassistant/components/freedompro/ @stefano055415
/tests/components/freedompro/ @stefano055415
/homeassistant/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185
/tests/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185
/homeassistant/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
/tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
/homeassistant/components/fritzbox/ @mib1185 @flabbamann
/tests/components/fritzbox/ @mib1185 @flabbamann
/homeassistant/components/fritzbox_callmonitor/ @cdce8p
@ -586,8 +588,8 @@ build.json @home-assistant/supervisor
/tests/components/group/ @home-assistant/core
/homeassistant/components/guardian/ @bachya
/tests/components/guardian/ @bachya
/homeassistant/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
/tests/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
/homeassistant/components/habitica/ @tr4nt0r
/tests/components/habitica/ @tr4nt0r
/homeassistant/components/hardkernel/ @home-assistant/core
/tests/components/hardkernel/ @home-assistant/core
/homeassistant/components/hardware/ @home-assistant/core
@ -954,6 +956,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/msteams/ @peroyvind
/homeassistant/components/mullvad/ @meichthys
/tests/components/mullvad/ @meichthys
/homeassistant/components/music_assistant/ @music-assistant
/tests/components/music_assistant/ @music-assistant
/homeassistant/components/mutesync/ @currentoor
/tests/components/mutesync/ @currentoor
/homeassistant/components/my/ @home-assistant/core
@ -968,8 +972,8 @@ build.json @home-assistant/supervisor
/tests/components/nam/ @bieniu
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
/tests/components/nanoleaf/ @milanmeu @joostlek
/homeassistant/components/neato/ @Santobert
/tests/components/neato/ @Santobert
/homeassistant/components/nasweb/ @nasWebio
/tests/components/nasweb/ @nasWebio
/homeassistant/components/nederlandse_spoorwegen/ @YarmoM
/homeassistant/components/ness_alarm/ @nickw444
/tests/components/ness_alarm/ @nickw444
@ -1008,6 +1012,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/noaa_tides/ @jdelaney72
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
/tests/components/nobo_hub/ @echoromeo @oyvindwe
/homeassistant/components/nordpool/ @gjohansson-ST
/tests/components/nordpool/ @gjohansson-ST
/homeassistant/components/notify/ @home-assistant/core
/tests/components/notify/ @home-assistant/core
/homeassistant/components/notify_events/ @matrozov @papajojo
@ -1338,6 +1344,8 @@ build.json @home-assistant/supervisor
/tests/components/siren/ @home-assistant/core @raman325
/homeassistant/components/sisyphus/ @jkeljo
/homeassistant/components/sky_hub/ @rogerselwyn
/homeassistant/components/sky_remote/ @dunnmj @saty9
/tests/components/sky_remote/ @dunnmj @saty9
/homeassistant/components/skybell/ @tkdrob
/tests/components/skybell/ @tkdrob
/homeassistant/components/slack/ @tkdrob @fletcherau
@ -1479,8 +1487,8 @@ build.json @home-assistant/supervisor
/tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core
/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core
/homeassistant/components/template/ @PhracturedBlue @home-assistant/core
/tests/components/template/ @PhracturedBlue @home-assistant/core
/homeassistant/components/tesla_fleet/ @Bre77
/tests/components/tesla_fleet/ @Bre77
/homeassistant/components/tesla_wall_connector/ @einarhauks
@ -1565,6 +1573,8 @@ build.json @home-assistant/supervisor
/tests/components/unifi/ @Kane610
/homeassistant/components/unifi_direct/ @tofuSCHNITZEL
/homeassistant/components/unifiled/ @florisvdk
/homeassistant/components/unifiprotect/ @RaHehl
/tests/components/unifiprotect/ @RaHehl
/homeassistant/components/upb/ @gwww
/tests/components/upb/ @gwww
/homeassistant/components/upc_connect/ @pvizeli @fabaff

View File

@ -13,7 +13,7 @@ ENV \
ARG QEMU_CPU
# Install uv
RUN pip3 install uv==0.4.28
RUN pip3 install uv==0.5.4
WORKDIR /usr/src

View File

@ -35,6 +35,9 @@ RUN \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc
# Install uv
RUN pip3 install uv

View File

@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.06.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.06.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.06.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.06.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.06.1
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io

View File

@ -9,6 +9,7 @@ import os
import sys
import threading
from .backup_restore import restore_backup
from .const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE, __version__
FAULT_LOG_FILENAME = "home-assistant.log.fault"
@ -182,6 +183,9 @@ def main() -> int:
return scripts.run(args.script)
config_dir = os.path.abspath(os.path.join(os.getcwd(), args.config))
if restore_backup(config_dir):
return RESTART_EXIT_CODE
ensure_config_path(config_dir)
# pylint: disable-next=import-outside-toplevel

View File

@ -18,7 +18,7 @@ from homeassistant.util.json import json_loads
JWT_TOKEN_CACHE_SIZE = 16
MAX_TOKEN_SIZE = 8192
_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss")
_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss", "sub", "jti")
_VERIFY_OPTIONS: dict[str, Any] = {f"verify_{key}": True for key in _VERIFY_KEYS} | {
"require": []

View File

@ -177,17 +177,17 @@ class TotpAuthModule(MultiFactorAuthModule):
class TotpSetupFlow(SetupFlow):
"""Handler for the setup flow."""
_auth_module: TotpAuthModule
_ota_secret: str
_url: str
_image: str
def __init__(
self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User
) -> None:
"""Initialize the setup flow."""
super().__init__(auth_module, setup_schema, user.id)
# to fix typing complaint
self._auth_module: TotpAuthModule = auth_module
self._user = user
self._ota_secret: str = ""
self._url: str | None = None
self._image: str | None = None
async def async_step_init(
self, user_input: dict[str, str] | None = None
@ -214,12 +214,11 @@ class TotpSetupFlow(SetupFlow):
errors["base"] = "invalid_code"
else:
hass = self._auth_module.hass
(
self._ota_secret,
self._url,
self._image,
) = await hass.async_add_executor_job(
) = await self._auth_module.hass.async_add_executor_job(
_generate_secret_and_qr_code,
str(self._user.name),
)

View File

@ -0,0 +1,126 @@
"""Home Assistant module to handle restoring backups."""
from dataclasses import dataclass
import json
import logging
from pathlib import Path
import shutil
import sys
from tempfile import TemporaryDirectory
from awesomeversion import AwesomeVersion
import securetar
from .const import __version__ as HA_VERSION
RESTORE_BACKUP_FILE = ".HA_RESTORE"
KEEP_PATHS = ("backups",)
_LOGGER = logging.getLogger(__name__)
@dataclass
class RestoreBackupFileContent:
"""Definition for restore backup file content."""
backup_file_path: Path
def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
"""Return the contents of the restore backup file."""
instruction_path = config_dir.joinpath(RESTORE_BACKUP_FILE)
try:
instruction_content = json.loads(instruction_path.read_text(encoding="utf-8"))
return RestoreBackupFileContent(
backup_file_path=Path(instruction_content["path"])
)
except (FileNotFoundError, json.JSONDecodeError):
return None
def _clear_configuration_directory(config_dir: Path) -> None:
"""Delete all files and directories in the config directory except for the backups directory."""
keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS]
config_contents = sorted(
[entry for entry in config_dir.iterdir() if entry not in keep_paths]
)
for entry in config_contents:
entrypath = config_dir.joinpath(entry)
if entrypath.is_file():
entrypath.unlink()
elif entrypath.is_dir():
shutil.rmtree(entrypath)
def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
"""Extract the backup file to the config directory."""
with (
TemporaryDirectory() as tempdir,
securetar.SecureTarFile(
backup_file_path,
gzip=False,
mode="r",
) as ostf,
):
ostf.extractall(
path=Path(tempdir, "extracted"),
members=securetar.secure_path(ostf),
filter="fully_trusted",
)
backup_meta_file = Path(tempdir, "extracted", "backup.json")
backup_meta = json.loads(backup_meta_file.read_text(encoding="utf8"))
if (
backup_meta_version := AwesomeVersion(
backup_meta["homeassistant"]["version"]
)
) > HA_VERSION:
raise ValueError(
f"You need at least Home Assistant version {backup_meta_version} to restore this backup"
)
with securetar.SecureTarFile(
Path(
tempdir,
"extracted",
f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
),
gzip=backup_meta["compressed"],
mode="r",
) as istf:
for member in istf.getmembers():
if member.name == "data":
continue
member.name = member.name.replace("data/", "")
_clear_configuration_directory(config_dir)
istf.extractall(
path=config_dir,
members=[
member
for member in securetar.secure_path(istf)
if member.name != "data"
],
filter="fully_trusted",
)
def restore_backup(config_dir_path: str) -> bool:
"""Restore the backup file if any.
Returns True if a restore backup file was found and restored, False otherwise.
"""
config_dir = Path(config_dir_path)
if not (restore_content := restore_backup_file_content(config_dir)):
return False
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
backup_file_path = restore_content.backup_file_path
_LOGGER.info("Restoring %s", backup_file_path)
try:
_extract_backup(config_dir, backup_file_path)
except FileNotFoundError as err:
raise ValueError(f"Backup file {backup_file_path} does not exist") from err
_LOGGER.info("Restore complete, restarting")
return True
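A minimal sketch (not part of the commit) of how a restore gets triggered: something writes the .HA_RESTORE instruction file that restore_backup_file_content() parses, and the next start-up picks it up via the restore_backup() call added in __main__.py above. The paths below are assumptions for illustration only.

import json
from pathlib import Path

config_dir = Path("/config")                        # assumed configuration directory
backup_tar = config_dir / "backups" / "backup.tar"  # assumed backup archive

# Only the "path" key is read by restore_backup_file_content().
(config_dir / ".HA_RESTORE").write_text(
    json.dumps({"path": str(backup_tar)}), encoding="utf-8"
)
# On the next start, main() calls restore_backup(config_dir); if the instruction
# file exists, the archive is extracted over the config directory (keeping the
# backups/ folder) and Home Assistant exits with RESTART_EXIT_CODE to be restarted.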

View File

@ -515,7 +515,7 @@ async def async_from_config_dict(
issue_registry.async_create_issue(
hass,
core.DOMAIN,
"python_version",
f"python_version_{required_python_version}",
is_fixable=False,
severity=issue_registry.IssueSeverity.WARNING,
breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,

View File

@ -0,0 +1,5 @@
{
"domain": "sky",
"name": "Sky",
"integrations": ["sky_hub", "sky_remote"]
}

View File

@ -112,9 +112,6 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a flow initialized by the user."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
if user_input is None:
return self.async_show_form(
step_id="user", data_schema=vol.Schema(self.data_schema)

View File

@ -9,5 +9,6 @@
},
"iot_class": "cloud_push",
"loggers": ["jaraco.abode", "lomond"],
"requirements": ["jaraco.abode==6.2.1"]
"requirements": ["jaraco.abode==6.2.1"],
"single_config_entry": true
}

View File

@ -28,7 +28,6 @@
"invalid_mfa_code": "Invalid MFA code"
},
"abort": {
"single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
}
},

View File

@ -0,0 +1,31 @@
"""Initialize the Acaia component."""
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .coordinator import AcaiaConfigEntry, AcaiaCoordinator
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.BUTTON,
Platform.SENSOR,
]
async def async_setup_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
"""Set up acaia as config entry."""
coordinator = AcaiaCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@ -0,0 +1,58 @@
"""Binary sensor platform for Acaia scales."""
from collections.abc import Callable
from dataclasses import dataclass
from aioacaia.acaiascale import AcaiaScale
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity
@dataclass(kw_only=True, frozen=True)
class AcaiaBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Description for Acaia binary sensor entities."""
is_on_fn: Callable[[AcaiaScale], bool]
BINARY_SENSORS: tuple[AcaiaBinarySensorEntityDescription, ...] = (
AcaiaBinarySensorEntityDescription(
key="timer_running",
translation_key="timer_running",
device_class=BinarySensorDeviceClass.RUNNING,
is_on_fn=lambda scale: scale.timer_running,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AcaiaConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up binary sensors."""
coordinator = entry.runtime_data
async_add_entities(
AcaiaBinarySensor(coordinator, description) for description in BINARY_SENSORS
)
class AcaiaBinarySensor(AcaiaEntity, BinarySensorEntity):
"""Representation of an Acaia binary sensor."""
entity_description: AcaiaBinarySensorEntityDescription
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
return self.entity_description.is_on_fn(self._scale)

View File

@ -0,0 +1,63 @@
"""Button entities for Acaia scales."""
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any
from aioacaia.acaiascale import AcaiaScale
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity
PARALLEL_UPDATES = 0
@dataclass(kw_only=True, frozen=True)
class AcaiaButtonEntityDescription(ButtonEntityDescription):
"""Description for acaia button entities."""
press_fn: Callable[[AcaiaScale], Coroutine[Any, Any, None]]
BUTTONS: tuple[AcaiaButtonEntityDescription, ...] = (
AcaiaButtonEntityDescription(
key="tare",
translation_key="tare",
press_fn=lambda scale: scale.tare(),
),
AcaiaButtonEntityDescription(
key="reset_timer",
translation_key="reset_timer",
press_fn=lambda scale: scale.reset_timer(),
),
AcaiaButtonEntityDescription(
key="start_stop",
translation_key="start_stop",
press_fn=lambda scale: scale.start_stop_timer(),
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AcaiaConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up button entities and services."""
coordinator = entry.runtime_data
async_add_entities(AcaiaButton(coordinator, description) for description in BUTTONS)
class AcaiaButton(AcaiaEntity, ButtonEntity):
"""Representation of an Acaia button."""
entity_description: AcaiaButtonEntityDescription
async def async_press(self) -> None:
"""Handle the button press."""
await self.entity_description.press_fn(self._scale)

View File

@ -0,0 +1,149 @@
"""Config flow for Acaia integration."""
import logging
from typing import Any
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice
from aioacaia.helpers import is_new_scale
import voluptuous as vol
from homeassistant.components.bluetooth import (
BluetoothServiceInfoBleak,
async_discovered_service_info,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ADDRESS, CONF_NAME
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.selector import (
SelectOptionDict,
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
)
from .const import CONF_IS_NEW_STYLE_SCALE, DOMAIN
_LOGGER = logging.getLogger(__name__)
class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for acaia."""
def __init__(self) -> None:
"""Initialize the config flow."""
self._discovered: dict[str, Any] = {}
self._discovered_devices: dict[str, str] = {}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a flow initialized by the user."""
errors: dict[str, str] = {}
if user_input is not None:
mac = user_input[CONF_ADDRESS]
try:
is_new_style_scale = await is_new_scale(mac)
except AcaiaDeviceNotFound:
errors["base"] = "device_not_found"
except AcaiaError:
_LOGGER.exception("Error occurred while connecting to the scale")
errors["base"] = "unknown"
except AcaiaUnknownDevice:
return self.async_abort(reason="unsupported_device")
else:
await self.async_set_unique_id(format_mac(mac))
self._abort_if_unique_id_configured()
if not errors:
return self.async_create_entry(
title=self._discovered_devices[mac],
data={
CONF_ADDRESS: mac,
CONF_IS_NEW_STYLE_SCALE: is_new_style_scale,
},
)
for device in async_discovered_service_info(self.hass):
self._discovered_devices[device.address] = device.name
if not self._discovered_devices:
return self.async_abort(reason="no_devices_found")
options = [
SelectOptionDict(
value=device_mac,
label=f"{device_name} ({device_mac})",
)
for device_mac, device_name in self._discovered_devices.items()
]
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_ADDRESS): SelectSelector(
SelectSelectorConfig(
options=options,
mode=SelectSelectorMode.DROPDOWN,
)
)
}
),
errors=errors,
)
async def async_step_bluetooth(
self, discovery_info: BluetoothServiceInfoBleak
) -> ConfigFlowResult:
"""Handle a discovered Bluetooth device."""
self._discovered[CONF_ADDRESS] = discovery_info.address
self._discovered[CONF_NAME] = discovery_info.name
await self.async_set_unique_id(format_mac(discovery_info.address))
self._abort_if_unique_id_configured()
try:
self._discovered[CONF_IS_NEW_STYLE_SCALE] = await is_new_scale(
discovery_info.address
)
except AcaiaDeviceNotFound:
_LOGGER.debug("Device not found during discovery")
return self.async_abort(reason="device_not_found")
except AcaiaError:
_LOGGER.debug(
"Error occurred while connecting to the scale during discovery",
exc_info=True,
)
return self.async_abort(reason="unknown")
except AcaiaUnknownDevice:
_LOGGER.debug("Unsupported device during discovery")
return self.async_abort(reason="unsupported_device")
return await self.async_step_bluetooth_confirm()
async def async_step_bluetooth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle confirmation of Bluetooth discovery."""
if user_input is not None:
return self.async_create_entry(
title=self._discovered[CONF_NAME],
data={
CONF_ADDRESS: self._discovered[CONF_ADDRESS],
CONF_IS_NEW_STYLE_SCALE: self._discovered[CONF_IS_NEW_STYLE_SCALE],
},
)
self.context["title_placeholders"] = placeholders = {
CONF_NAME: self._discovered[CONF_NAME]
}
self._set_confirm_only()
return self.async_show_form(
step_id="bluetooth_confirm",
description_placeholders=placeholders,
)

View File

@ -0,0 +1,4 @@
"""Constants for component."""
DOMAIN = "acaia"
CONF_IS_NEW_STYLE_SCALE = "is_new_style_scale"

View File

@ -0,0 +1,86 @@
"""Coordinator for Acaia integration."""
from __future__ import annotations
from datetime import timedelta
import logging
from aioacaia.acaiascale import AcaiaScale
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import CONF_IS_NEW_STYLE_SCALE
SCAN_INTERVAL = timedelta(seconds=15)
_LOGGER = logging.getLogger(__name__)
type AcaiaConfigEntry = ConfigEntry[AcaiaCoordinator]
class AcaiaCoordinator(DataUpdateCoordinator[None]):
"""Class to handle fetching data from the scale."""
config_entry: AcaiaConfigEntry
def __init__(self, hass: HomeAssistant, entry: AcaiaConfigEntry) -> None:
"""Initialize coordinator."""
super().__init__(
hass,
_LOGGER,
name="acaia coordinator",
update_interval=SCAN_INTERVAL,
config_entry=entry,
)
self._scale = AcaiaScale(
address_or_ble_device=entry.data[CONF_ADDRESS],
name=entry.title,
is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
notify_callback=self.async_update_listeners,
)
@property
def scale(self) -> AcaiaScale:
"""Return the scale object."""
return self._scale
async def _async_update_data(self) -> None:
"""Fetch data."""
# scale is already connected, return
if self._scale.connected:
return
# scale is not connected, try to connect
try:
await self._scale.connect(setup_tasks=False)
except (AcaiaDeviceNotFound, AcaiaError, TimeoutError) as ex:
_LOGGER.debug(
"Could not connect to scale: %s, Error: %s",
self.config_entry.data[CONF_ADDRESS],
ex,
)
self._scale.device_disconnected_handler(notify=False)
return
# connected, set up background tasks
if not self._scale.heartbeat_task or self._scale.heartbeat_task.done():
self._scale.heartbeat_task = self.config_entry.async_create_background_task(
hass=self.hass,
target=self._scale.send_heartbeats(),
name="acaia_heartbeat_task",
)
if not self._scale.process_queue_task or self._scale.process_queue_task.done():
self._scale.process_queue_task = (
self.config_entry.async_create_background_task(
hass=self.hass,
target=self._scale.process_queue(),
name="acaia_process_queue_task",
)
)

View File

@ -0,0 +1,31 @@
"""Diagnostics support for Acaia."""
from __future__ import annotations
from dataclasses import asdict
from typing import Any
from homeassistant.core import HomeAssistant
from . import AcaiaConfigEntry
async def async_get_config_entry_diagnostics(
hass: HomeAssistant,
entry: AcaiaConfigEntry,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator = entry.runtime_data
scale = coordinator.scale
# collect all data sources
return {
"model": scale.model,
"device_state": (
asdict(scale.device_state) if scale.device_state is not None else ""
),
"mac": scale.mac,
"last_disconnect_time": scale.last_disconnect_time,
"timer": scale.timer,
"weight": scale.weight,
}

View File

@ -0,0 +1,46 @@
"""Base class for Acaia entities."""
from dataclasses import dataclass
from homeassistant.helpers.device_registry import (
CONNECTION_BLUETOOTH,
DeviceInfo,
format_mac,
)
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import AcaiaCoordinator
@dataclass
class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]):
"""Common elements for all entities."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: AcaiaCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._scale = coordinator.scale
formatted_mac = format_mac(self._scale.mac)
self._attr_unique_id = f"{formatted_mac}_{entity_description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, formatted_mac)},
manufacturer="Acaia",
model=self._scale.model,
suggested_area="Kitchen",
connections={(CONNECTION_BLUETOOTH, self._scale.mac)},
)
@property
def available(self) -> bool:
"""Returns whether entity is available."""
return super().available and self._scale.connected

View File

@ -0,0 +1,24 @@
{
"entity": {
"binary_sensor": {
"timer_running": {
"default": "mdi:timer",
"state": {
"on": "mdi:timer-play",
"off": "mdi:timer-off"
}
}
},
"button": {
"tare": {
"default": "mdi:scale-balance"
},
"reset_timer": {
"default": "mdi:timer-refresh"
},
"start_stop": {
"default": "mdi:timer-play"
}
}
}
}

View File

@ -0,0 +1,29 @@
{
"domain": "acaia",
"name": "Acaia",
"bluetooth": [
{
"manufacturer_id": 16962
},
{
"local_name": "ACAIA*"
},
{
"local_name": "PYXIS-*"
},
{
"local_name": "LUNAR-*"
},
{
"local_name": "PROCHBT001"
}
],
"codeowners": ["@zweckj"],
"config_flow": true,
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/acaia",
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["aioacaia"],
"requirements": ["aioacaia==0.1.10"]
}

View File

@ -0,0 +1,106 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
No custom actions are defined.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
No custom actions are defined.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: todo
entity-event-setup:
status: exempt
comment: |
No explicit event subscriptions.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup:
status: exempt
comment: |
Device is expected to be offline most of the time, but needs to connect quickly once available.
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
No custom actions are defined.
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable:
status: done
comment: |
Handled by coordinator.
parallel-updates: done
reauthentication-flow:
status: exempt
comment: |
No authentication required.
test-coverage: done
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: |
No IP discovery.
discovery:
status: done
comment: |
Bluetooth discovery.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: |
Device type integration.
entity-category: done
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: |
No noisy/non-essential entities.
entity-translations: done
exception-translations:
status: exempt
comment: |
No custom exceptions.
icon-translations: done
reconfiguration-flow:
status: exempt
comment: |
The only parameter that could be changed (MAC = unique_id) would force a new config entry.
repair-issues:
status: exempt
comment: |
No repairs/issues.
stale-devices:
status: exempt
comment: |
Device type integration.
# Platinum
async-dependency: done
inject-websession:
status: exempt
comment: |
Bluetooth connection.
strict-typing: done

View File

@ -0,0 +1,143 @@
"""Sensor platform for Acaia."""
from collections.abc import Callable
from dataclasses import dataclass
from aioacaia.acaiascale import AcaiaDeviceState, AcaiaScale
from aioacaia.const import UnitMass as AcaiaUnitOfMass
from homeassistant.components.sensor import (
RestoreSensor,
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorExtraStoredData,
SensorStateClass,
)
from homeassistant.const import PERCENTAGE, UnitOfMass, UnitOfVolumeFlowRate
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity
@dataclass(kw_only=True, frozen=True)
class AcaiaSensorEntityDescription(SensorEntityDescription):
"""Description for Acaia sensor entities."""
value_fn: Callable[[AcaiaScale], int | float | None]
@dataclass(kw_only=True, frozen=True)
class AcaiaDynamicUnitSensorEntityDescription(AcaiaSensorEntityDescription):
"""Description for Acaia sensor entities with dynamic units."""
unit_fn: Callable[[AcaiaDeviceState], str] | None = None
SENSORS: tuple[AcaiaSensorEntityDescription, ...] = (
AcaiaDynamicUnitSensorEntityDescription(
key="weight",
device_class=SensorDeviceClass.WEIGHT,
native_unit_of_measurement=UnitOfMass.GRAMS,
state_class=SensorStateClass.MEASUREMENT,
unit_fn=lambda data: (
UnitOfMass.OUNCES
if data.units == AcaiaUnitOfMass.OUNCES
else UnitOfMass.GRAMS
),
value_fn=lambda scale: scale.weight,
),
AcaiaDynamicUnitSensorEntityDescription(
key="flow_rate",
device_class=SensorDeviceClass.VOLUME_FLOW_RATE,
native_unit_of_measurement=UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND,
suggested_display_precision=1,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda scale: scale.flow_rate,
),
)
RESTORE_SENSORS: tuple[AcaiaSensorEntityDescription, ...] = (
AcaiaSensorEntityDescription(
key="battery",
device_class=SensorDeviceClass.BATTERY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda scale: (
scale.device_state.battery_level if scale.device_state else None
),
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AcaiaConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up sensors."""
coordinator = entry.runtime_data
entities: list[SensorEntity] = [
AcaiaSensor(coordinator, entity_description) for entity_description in SENSORS
]
entities.extend(
AcaiaRestoreSensor(coordinator, entity_description)
for entity_description in RESTORE_SENSORS
)
async_add_entities(entities)
class AcaiaSensor(AcaiaEntity, SensorEntity):
"""Representation of an Acaia sensor."""
entity_description: AcaiaDynamicUnitSensorEntityDescription
@property
def native_unit_of_measurement(self) -> str | None:
"""Return the unit of measurement of this entity."""
if (
self._scale.device_state is not None
and self.entity_description.unit_fn is not None
):
return self.entity_description.unit_fn(self._scale.device_state)
return self.entity_description.native_unit_of_measurement
@property
def native_value(self) -> int | float | None:
"""Return the state of the entity."""
return self.entity_description.value_fn(self._scale)
class AcaiaRestoreSensor(AcaiaEntity, RestoreSensor):
"""Representation of an Acaia sensor with restore capabilities."""
entity_description: AcaiaSensorEntityDescription
_restored_data: SensorExtraStoredData | None = None
async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""
await super().async_added_to_hass()
self._restored_data = await self.async_get_last_sensor_data()
if self._restored_data is not None:
self._attr_native_value = self._restored_data.native_value
self._attr_native_unit_of_measurement = (
self._restored_data.native_unit_of_measurement
)
if self._scale.device_state is not None:
self._attr_native_value = self.entity_description.value_fn(self._scale)
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if self._scale.device_state is not None:
self._attr_native_value = self.entity_description.value_fn(self._scale)
self._async_write_ha_state()
@property
def available(self) -> bool:
"""Return True if entity is available."""
return super().available or self._restored_data is not None

View File

@ -0,0 +1,46 @@
{
"config": {
"flow_title": "{name}",
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
"unsupported_device": "This device is not supported."
},
"error": {
"device_not_found": "Device could not be found.",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"bluetooth_confirm": {
"description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]"
},
"user": {
"description": "[%key:component::bluetooth::config::step::user::description%]",
"data": {
"address": "[%key:common::config_flow::data::device%]"
},
"data_description": {
"address": "Select Acaia scale you want to set up"
}
}
}
},
"entity": {
"binary_sensor": {
"timer_running": {
"name": "Timer running"
}
},
"button": {
"tare": {
"name": "Tare"
},
"reset_timer": {
"name": "Reset timer"
},
"start_stop": {
"name": "Start/stop timer"
}
}
}
}

View File

@ -7,7 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"quality_scale": "platinum",
"requirements": ["accuweather==3.0.0"],
"requirements": ["accuweather==4.0.0"],
"single_config_entry": true
}

View File

@ -4,5 +4,6 @@
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/acer_projector",
"iot_class": "local_polling",
"quality_scale": "legacy",
"requirements": ["pyserial==3.5"]
}

View File

@ -3,5 +3,6 @@
"name": "Actiontec",
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/actiontec",
"iot_class": "local_polling"
"iot_class": "local_polling",
"quality_scale": "legacy"
}

View File

@ -37,7 +37,7 @@ STATE_KEY_POSITION = "position"
PLATFORM_SCHEMA = COVER_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_ADS_VAR): cv.string,
vol.Required(CONF_ADS_VAR): cv.string,
vol.Optional(CONF_ADS_VAR_POSITION): cv.string,
vol.Optional(CONF_ADS_VAR_SET_POS): cv.string,
vol.Optional(CONF_ADS_VAR_CLOSE): cv.string,

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/ads",
"iot_class": "local_push",
"loggers": ["pyads"],
"quality_scale": "legacy",
"requirements": ["pyads==3.4.0"]
}

View File

@ -6,6 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/advantage_air",
"iot_class": "local_polling",
"loggers": ["advantage_air"],
"quality_scale": "platinum",
"requirements": ["advantage-air==0.4.4"]
}

View File

@ -1,10 +1,9 @@
"""The AEMET OpenData component."""
from dataclasses import dataclass
import logging
from aemet_opendata.exceptions import AemetError, TownNotFound
from aemet_opendata.interface import AEMET, ConnectionOptions
from aemet_opendata.interface import AEMET, ConnectionOptions, UpdateFeature
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
@ -13,20 +12,10 @@ from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client
from .const import CONF_STATION_UPDATES, PLATFORMS
from .coordinator import WeatherUpdateCoordinator
from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
type AemetConfigEntry = ConfigEntry[AemetData]
@dataclass
class AemetData:
"""Aemet runtime data."""
name: str
coordinator: WeatherUpdateCoordinator
async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> bool:
"""Set up AEMET OpenData as config entry."""
@ -34,9 +23,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo
api_key = entry.data[CONF_API_KEY]
latitude = entry.data[CONF_LATITUDE]
longitude = entry.data[CONF_LONGITUDE]
station_updates = entry.options.get(CONF_STATION_UPDATES, True)
update_features: int = UpdateFeature.FORECAST
if entry.options.get(CONF_STATION_UPDATES, True):
update_features |= UpdateFeature.STATION
options = ConnectionOptions(api_key, station_updates)
options = ConnectionOptions(api_key, update_features)
aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options)
try:
await aemet.select_coordinates(latitude, longitude)
@ -46,7 +37,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo
except AemetError as err:
raise ConfigEntryNotReady(err) from err
weather_coordinator = WeatherUpdateCoordinator(hass, aemet)
weather_coordinator = WeatherUpdateCoordinator(hass, entry, aemet)
await weather_coordinator.async_config_entry_first_refresh()
entry.runtime_data = AemetData(name=name, coordinator=weather_coordinator)
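A condensed sketch of the new option handling (assuming UpdateFeature composes as a flag-style value, as the bitwise OR in the diff implies):

from aemet_opendata.interface import ConnectionOptions, UpdateFeature

def build_connection_options(api_key: str, station_updates: bool) -> ConnectionOptions:
    """Sketch of the logic now in async_setup_entry."""
    update_features: int = UpdateFeature.FORECAST  # forecasts are always fetched
    if station_updates:  # CONF_STATION_UPDATES option, defaults to True
        update_features |= UpdateFeature.STATION  # add station data when enabled
    return ConnectionOptions(api_key, update_features)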

View File

@ -45,7 +45,7 @@ class AemetConfigFlow(ConfigFlow, domain=DOMAIN):
await self.async_set_unique_id(f"{latitude}-{longitude}")
self._abort_if_unique_id_configured()
options = ConnectionOptions(user_input[CONF_API_KEY], False)
options = ConnectionOptions(user_input[CONF_API_KEY])
aemet = AEMET(aiohttp_client.async_get_clientsession(self.hass), options)
try:
await aemet.select_coordinates(latitude, longitude)

View File

@ -3,6 +3,7 @@
from __future__ import annotations
from asyncio import timeout
from dataclasses import dataclass
from datetime import timedelta
import logging
from typing import Any, Final, cast
@ -19,6 +20,7 @@ from aemet_opendata.helpers import dict_nested_value
from aemet_opendata.interface import AEMET
from homeassistant.components.weather import Forecast
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@ -29,6 +31,16 @@ _LOGGER = logging.getLogger(__name__)
API_TIMEOUT: Final[int] = 120
WEATHER_UPDATE_INTERVAL = timedelta(minutes=10)
type AemetConfigEntry = ConfigEntry[AemetData]
@dataclass
class AemetData:
"""Aemet runtime data."""
name: str
coordinator: WeatherUpdateCoordinator
class WeatherUpdateCoordinator(DataUpdateCoordinator):
"""Weather data update coordinator."""
@ -36,6 +48,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
def __init__(
self,
hass: HomeAssistant,
entry: AemetConfigEntry,
aemet: AEMET,
) -> None:
"""Initialize coordinator."""
@ -44,6 +57,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
super().__init__(
hass,
_LOGGER,
config_entry=entry,
name=DOMAIN,
update_interval=WEATHER_UPDATE_INTERVAL,
)

View File

@ -15,7 +15,7 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from . import AemetConfigEntry
from .coordinator import AemetConfigEntry
TO_REDACT_CONFIG = [
CONF_API_KEY,

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/aemet",
"iot_class": "cloud_polling",
"loggers": ["aemet_opendata"],
"requirements": ["AEMET-OpenData==0.5.4"]
"requirements": ["AEMET-OpenData==0.6.3"]
}

View File

@ -55,7 +55,6 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import dt as dt_util
from . import AemetConfigEntry
from .const import (
ATTR_API_CONDITION,
ATTR_API_FORECAST_CONDITION,
@ -87,7 +86,7 @@ from .const import (
ATTR_API_WIND_SPEED,
CONDITIONS_MAP,
)
from .coordinator import WeatherUpdateCoordinator
from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator
from .entity import AemetEntity

View File

@ -27,9 +27,8 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import AemetConfigEntry
from .const import CONDITIONS_MAP
from .coordinator import WeatherUpdateCoordinator
from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator
from .entity import AemetEntity

View File

@ -0,0 +1,80 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
This integration does not provide additional actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: todo
docs-installation-instructions: todo
docs-removal-instructions: todo
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions: todo
config-entry-unloading: done
docs-configuration-parameters: todo
docs-installation-parameters: todo
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
reauthentication-flow:
status: exempt
comment: |
This integration does not require authentication.
test-coverage: done
# Gold
devices: done
diagnostics: done
discovery-update-info: done
discovery: done
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: |
This integration has a fixed single device.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: done
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |
This integration doesn't have any cases where raising an issue is needed.
stale-devices:
status: exempt
comment: |
This integration has a fixed single device.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@ -7,6 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["airly"],
"quality_scale": "platinum",
"requirements": ["airly==1.1.0"]
}

View File

@ -1,5 +1,7 @@
"""Config flow for AirNow integration."""
from __future__ import annotations
import logging
from typing import Any
@ -12,7 +14,6 @@ from homeassistant.config_entries import (
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
OptionsFlowWithConfigEntry,
)
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS
from homeassistant.core import HomeAssistant, callback
@ -120,12 +121,12 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN):
@callback
def async_get_options_flow(
config_entry: ConfigEntry,
) -> OptionsFlow:
) -> AirNowOptionsFlowHandler:
"""Return the options flow."""
return AirNowOptionsFlowHandler(config_entry)
return AirNowOptionsFlowHandler()
class AirNowOptionsFlowHandler(OptionsFlowWithConfigEntry):
class AirNowOptionsFlowHandler(OptionsFlow):
"""Handle an options flow for AirNow."""
async def async_step_init(
@ -136,12 +137,7 @@ class AirNowOptionsFlowHandler(OptionsFlowWithConfigEntry):
return self.async_create_entry(data=user_input)
options_schema = vol.Schema(
{
vol.Optional(CONF_RADIUS): vol.All(
int,
vol.Range(min=5),
),
}
{vol.Optional(CONF_RADIUS): vol.All(int, vol.Range(min=5))}
)
return self.async_show_form(

View File
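The AirNow change above follows the new core options-flow pattern: the handler no longer receives the config entry in its constructor, because the base OptionsFlow exposes self.config_entry itself. A minimal sketch of that pattern, assuming Home Assistant 2024.11 or later; the Example* names, the "radius" option and its default are illustrative, not part of this commit:

import voluptuous as vol

from homeassistant.config_entries import (
    ConfigEntry,
    ConfigFlow,
    ConfigFlowResult,
    OptionsFlow,
)
from homeassistant.core import callback


class ExampleOptionsFlowHandler(OptionsFlow):
    """Options flow that reads stored options via self.config_entry."""

    async def async_step_init(
        self, user_input: dict | None = None
    ) -> ConfigFlowResult:
        """Manage the options."""
        if user_input is not None:
            return self.async_create_entry(data=user_input)
        # self.config_entry is populated by the framework; no __init__ is required.
        current = self.config_entry.options.get("radius", 25)
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema({vol.Optional("radius", default=current): int}),
        )


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Config flow; only the options-flow hook is shown."""

    @staticmethod
    @callback
    def async_get_options_flow(config_entry: ConfigEntry) -> ExampleOptionsFlowHandler:
        """Return the options flow; the entry is no longer passed to the constructor."""
        return ExampleOptionsFlowHandler()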

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airtouch5",
"iot_class": "local_push",
"loggers": ["airtouch5py"],
"requirements": ["airtouch5py==0.2.10"]
"requirements": ["airtouch5py==0.2.11"]
}

View File

@ -35,6 +35,7 @@ from homeassistant.helpers.deprecation import (
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.entity_platform import EntityPlatform
from homeassistant.helpers.frame import ReportBehavior, report_usage
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey
@ -163,7 +164,6 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
_alarm_control_panel_option_default_code: str | None = None
__alarm_legacy_state: bool = False
__alarm_legacy_state_reported: bool = False
def __init_subclass__(cls, **kwargs: Any) -> None:
"""Post initialisation processing."""
@ -173,17 +173,15 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
# setting the state directly.
cls.__alarm_legacy_state = True
def __setattr__(self, __name: str, __value: Any) -> None:
def __setattr__(self, name: str, value: Any, /) -> None:
"""Set attribute.
Deprecation warning if setting '_attr_state' directly
unless already reported.
"""
if __name == "_attr_state":
if self.__alarm_legacy_state_reported is not True:
if name == "_attr_state":
self._report_deprecated_alarm_state_handling()
self.__alarm_legacy_state_reported = True
return super().__setattr__(__name, __value)
return super().__setattr__(name, value)
@callback
def add_to_platform_start(
@ -194,7 +192,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
) -> None:
"""Start adding an entity to a platform."""
super().add_to_platform_start(hass, platform, parallel_updates)
if self.__alarm_legacy_state and not self.__alarm_legacy_state_reported:
if self.__alarm_legacy_state:
self._report_deprecated_alarm_state_handling()
@callback
@ -203,18 +201,15 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
Integrations should implement alarm_state instead of using state directly.
"""
self.__alarm_legacy_state_reported = True
if "custom_components" in type(self).__module__:
# Do not report on core integrations as they have been fixed.
report_issue = "report it to the custom integration author."
_LOGGER.warning(
"Entity %s (%s) is setting state directly"
" which will stop working in HA Core 2025.11."
" Entities should implement the 'alarm_state' property and"
" return its state using the AlarmControlPanelState enum, please %s",
self.entity_id,
type(self),
report_issue,
report_usage(
"is setting state directly."
f" Entity {self.entity_id} ({type(self)}) should implement the 'alarm_state'"
" property and return its state using the AlarmControlPanelState enum",
core_integration_behavior=ReportBehavior.ERROR,
custom_integration_behavior=ReportBehavior.LOG,
breaks_in_ha_version="2025.11",
integration_domain=self.platform.platform_name if self.platform else None,
exclude_integrations={DOMAIN},
)
@final
@ -275,7 +270,6 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
"""Check if arm code is required, raise if no code is given."""
if not (_code := self.code_or_default_code(code)) and self.code_arm_required:
raise ServiceValidationError(
f"Arming requires a code but none was given for {self.entity_id}",
translation_domain=DOMAIN,
translation_key="code_arm_required",
translation_placeholders={

View File
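The alarm_control_panel change above swaps a hand-rolled deprecation warning for the report_usage helper. A condensed sketch of the call, using the same keyword arguments as the hunk; the message text and the "example" domain are placeholders:

from homeassistant.helpers.frame import ReportBehavior, report_usage

# Raise for core integrations, log for custom integrations, and record
# the release in which the old behaviour stops working.
report_usage(
    "is setting state directly; implement the 'alarm_state' property instead",
    core_integration_behavior=ReportBehavior.ERROR,
    custom_integration_behavior=ReportBehavior.LOG,
    breaks_in_ha_version="2025.11",
    integration_domain="example",
)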

@ -130,7 +130,7 @@
},
"alarm_trigger": {
"name": "Trigger",
"description": "Enables an external alarm trigger.",
"description": "Trigger the alarm manually.",
"fields": {
"code": {
"name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@ -138,5 +138,10 @@
}
}
}
},
"exceptions": {
"code_arm_required": {
"message": "Arming requires a code but none was given for {entity_id}."
}
}
}

View File
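The new exceptions block above pairs with the ServiceValidationError raised in the entity code earlier in this diff; the hardcoded English message is dropped in favour of the translation key. A minimal sketch of raising it, with a placeholder entity id:

from homeassistant.exceptions import ServiceValidationError

DOMAIN = "alarm_control_panel"


def validate_code(code: str | None) -> None:
    """Raise the translated error when arming without a required code."""
    if not code:
        # The message text lives in strings.json under exceptions.code_arm_required.
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="code_arm_required",
            translation_placeholders={"entity_id": "alarm_control_panel.front_door"},
        )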

@ -816,6 +816,12 @@ class AlexaPlaybackController(AlexaCapability):
"""
supported_features = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
operations: dict[
cover.CoverEntityFeature | media_player.MediaPlayerEntityFeature, str
]
if self.entity.domain == cover.DOMAIN:
operations = {cover.CoverEntityFeature.STOP: "Stop"}
else:
operations = {
media_player.MediaPlayerEntityFeature.NEXT_TRACK: "Next",
media_player.MediaPlayerEntityFeature.PAUSE: "Pause",

View File

@ -559,6 +559,10 @@ class CoverCapabilities(AlexaEntity):
)
if supported & cover.CoverEntityFeature.SET_TILT_POSITION:
yield AlexaRangeController(self.entity, instance=f"{cover.DOMAIN}.tilt")
if supported & (
cover.CoverEntityFeature.STOP | cover.CoverEntityFeature.STOP_TILT
):
yield AlexaPlaybackController(self.entity, instance=f"{cover.DOMAIN}.stop")
yield AlexaEndpointHealth(self.hass, self.entity)
yield Alexa(self.entity)

View File

@ -2,6 +2,7 @@
from __future__ import annotations
import asyncio
from collections.abc import Callable, Coroutine
import logging
import math
@ -764,6 +765,22 @@ async def async_api_stop(
entity = directive.entity
data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id}
if entity.domain == cover.DOMAIN:
supported: int = entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
feature_services: dict[int, str] = {
cover.CoverEntityFeature.STOP.value: cover.SERVICE_STOP_COVER,
cover.CoverEntityFeature.STOP_TILT.value: cover.SERVICE_STOP_COVER_TILT,
}
await asyncio.gather(
*(
hass.services.async_call(
entity.domain, service, data, blocking=False, context=context
)
for feature, service in feature_services.items()
if feature & supported
)
)
else:
await hass.services.async_call(
entity.domain, SERVICE_MEDIA_STOP, data, blocking=False, context=context
)

View File
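The cover branch above fans out only the stop services the entity actually supports. A small worked example of that bitmask filtering, with an illustrative cover that supports STOP and OPEN but not STOP_TILT:

from homeassistant.components import cover

# An entity reports its capabilities as a bitmask of CoverEntityFeature flags.
supported = int(cover.CoverEntityFeature.STOP | cover.CoverEntityFeature.OPEN)

feature_services = {
    cover.CoverEntityFeature.STOP.value: cover.SERVICE_STOP_COVER,
    cover.CoverEntityFeature.STOP_TILT.value: cover.SERVICE_STOP_COVER_TILT,
}

# Bitwise AND keeps only the services this cover can actually handle.
services = [svc for feature, svc in feature_services.items() if feature & supported]
assert services == [cover.SERVICE_STOP_COVER]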

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/alpha_vantage",
"iot_class": "cloud_polling",
"loggers": ["alpha_vantage"],
"quality_scale": "legacy",
"requirements": ["alpha-vantage==2.3.1"]
}

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/amazon_polly",
"iot_class": "cloud_push",
"loggers": ["boto3", "botocore", "s3transfer"],
"quality_scale": "legacy",
"requirements": ["boto3==1.34.131"]
}

View File

@ -1,7 +1,6 @@
"""Support for Amber Electric."""
from amberelectric import Configuration
from amberelectric.api import amber_api
import amberelectric
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_TOKEN
@ -15,8 +14,9 @@ type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator]
async def async_setup_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool:
"""Set up Amber Electric from a config entry."""
configuration = Configuration(access_token=entry.data[CONF_API_TOKEN])
api_instance = amber_api.AmberApi.create(configuration)
configuration = amberelectric.Configuration(access_token=entry.data[CONF_API_TOKEN])
api_client = amberelectric.ApiClient(configuration)
api_instance = amberelectric.AmberApi(api_client)
site_id = entry.data[CONF_SITE_ID]
coordinator = AmberUpdateCoordinator(hass, api_instance, site_id)

View File
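A minimal sketch of the amberelectric 2.x client construction used above; the access token is a placeholder:

import amberelectric

configuration = amberelectric.Configuration(access_token="YOUR_API_TOKEN")
# 2.x separates the low-level HTTP client from the API surface.
api_client = amberelectric.ApiClient(configuration)
api = amberelectric.AmberApi(api_client)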

@ -3,8 +3,8 @@
from __future__ import annotations
import amberelectric
from amberelectric.api import amber_api
from amberelectric.model.site import Site, SiteStatus
from amberelectric.models.site import Site
from amberelectric.models.site_status import SiteStatus
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
@ -23,11 +23,15 @@ API_URL = "https://app.amber.com.au/developers"
def generate_site_selector_name(site: Site) -> str:
"""Generate the name to show in the site drop down in the configuration flow."""
# For some reason the generated API key returns this as any, not a string. Thanks pydantic
nmi = str(site.nmi)
if site.status == SiteStatus.CLOSED:
return site.nmi + " (Closed: " + site.closed_on.isoformat() + ")" # type: ignore[no-any-return]
if site.closed_on is None:
return f"{nmi} (Closed)"
return f"{nmi} (Closed: {site.closed_on.isoformat()})"
if site.status == SiteStatus.PENDING:
return site.nmi + " (Pending)" # type: ignore[no-any-return]
return site.nmi # type: ignore[no-any-return]
return f"{nmi} (Pending)"
return nmi
def filter_sites(sites: list[Site]) -> list[Site]:
@ -35,7 +39,7 @@ def filter_sites(sites: list[Site]) -> list[Site]:
filtered: list[Site] = []
filtered_nmi: set[str] = set()
for site in sorted(sites, key=lambda site: site.status.value):
for site in sorted(sites, key=lambda site: site.status):
if site.status == SiteStatus.ACTIVE or site.nmi not in filtered_nmi:
filtered.append(site)
filtered_nmi.add(site.nmi)
@ -56,7 +60,8 @@ class AmberElectricConfigFlow(ConfigFlow, domain=DOMAIN):
def _fetch_sites(self, token: str) -> list[Site] | None:
configuration = amberelectric.Configuration(access_token=token)
api: amber_api.AmberApi = amber_api.AmberApi.create(configuration)
api_client = amberelectric.ApiClient(configuration)
api = amberelectric.AmberApi(api_client)
try:
sites: list[Site] = filter_sites(api.get_sites())

View File

@ -5,13 +5,13 @@ from __future__ import annotations
from datetime import timedelta
from typing import Any
from amberelectric import ApiException
from amberelectric.api import amber_api
from amberelectric.model.actual_interval import ActualInterval
from amberelectric.model.channel import ChannelType
from amberelectric.model.current_interval import CurrentInterval
from amberelectric.model.forecast_interval import ForecastInterval
from amberelectric.model.interval import Descriptor
import amberelectric
from amberelectric.models.actual_interval import ActualInterval
from amberelectric.models.channel import ChannelType
from amberelectric.models.current_interval import CurrentInterval
from amberelectric.models.forecast_interval import ForecastInterval
from amberelectric.models.price_descriptor import PriceDescriptor
from amberelectric.rest import ApiException
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@ -31,22 +31,22 @@ def is_forecast(interval: ActualInterval | CurrentInterval | ForecastInterval) -
def is_general(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
"""Return true if the supplied interval is on the general channel."""
return interval.channel_type == ChannelType.GENERAL # type: ignore[no-any-return]
return interval.channel_type == ChannelType.GENERAL
def is_controlled_load(
interval: ActualInterval | CurrentInterval | ForecastInterval,
) -> bool:
"""Return true if the supplied interval is on the controlled load channel."""
return interval.channel_type == ChannelType.CONTROLLED_LOAD # type: ignore[no-any-return]
return interval.channel_type == ChannelType.CONTROLLEDLOAD
def is_feed_in(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
"""Return true if the supplied interval is on the feed in channel."""
return interval.channel_type == ChannelType.FEED_IN # type: ignore[no-any-return]
return interval.channel_type == ChannelType.FEEDIN
def normalize_descriptor(descriptor: Descriptor) -> str | None:
def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None:
"""Return the snake case versions of descriptor names. Returns None if the name is not recognized."""
if descriptor is None:
return None
@ -71,7 +71,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
"""AmberUpdateCoordinator - In charge of downloading the data for a site, which all the sensors read."""
def __init__(
self, hass: HomeAssistant, api: amber_api.AmberApi, site_id: str
self, hass: HomeAssistant, api: amberelectric.AmberApi, site_id: str
) -> None:
"""Initialise the data service."""
super().__init__(
@ -93,12 +93,13 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
"grid": {},
}
try:
data = self._api.get_current_price(self.site_id, next=48)
data = self._api.get_current_prices(self.site_id, next=48)
intervals = [interval.actual_instance for interval in data]
except ApiException as api_exception:
raise UpdateFailed("Missing price data, skipping update") from api_exception
current = [interval for interval in data if is_current(interval)]
forecasts = [interval for interval in data if is_forecast(interval)]
current = [interval for interval in intervals if is_current(interval)]
forecasts = [interval for interval in intervals if is_forecast(interval)]
general = [interval for interval in current if is_general(interval)]
if len(general) == 0:
@ -137,7 +138,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
interval for interval in forecasts if is_feed_in(interval)
]
LOGGER.debug("Fetched new Amber data: %s", data)
LOGGER.debug("Fetched new Amber data: %s", intervals)
return result
async def _async_update_data(self) -> dict[str, Any]:

View File
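With the 2.x client, get_current_prices returns wrapper objects, so the coordinator unwraps actual_instance before filtering by channel. A self-contained sketch under the same assumptions as the previous amberelectric example (token and site id are placeholders):

import amberelectric
from amberelectric.models.channel import ChannelType

api = amberelectric.AmberApi(
    amberelectric.ApiClient(amberelectric.Configuration(access_token="YOUR_API_TOKEN"))
)

# get_current_prices replaces 1.x get_current_price; each item wraps the
# concrete interval model, exposed via .actual_instance.
data = api.get_current_prices("YOUR_SITE_ID", next=48)
intervals = [interval.actual_instance for interval in data]
general = [i for i in intervals if i.channel_type == ChannelType.GENERAL]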

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/amberelectric",
"iot_class": "cloud_polling",
"loggers": ["amberelectric"],
"requirements": ["amberelectric==1.1.1"]
"requirements": ["amberelectric==2.0.12"]
}

View File

@ -8,9 +8,9 @@ from __future__ import annotations
from typing import Any
from amberelectric.model.channel import ChannelType
from amberelectric.model.current_interval import CurrentInterval
from amberelectric.model.forecast_interval import ForecastInterval
from amberelectric.models.channel import ChannelType
from amberelectric.models.current_interval import CurrentInterval
from amberelectric.models.forecast_interval import ForecastInterval
from homeassistant.components.sensor import (
SensorEntity,
@ -52,7 +52,7 @@ class AmberSensor(CoordinatorEntity[AmberUpdateCoordinator], SensorEntity):
self,
coordinator: AmberUpdateCoordinator,
description: SensorEntityDescription,
channel_type: ChannelType,
channel_type: str,
) -> None:
"""Initialize the Sensor."""
super().__init__(coordinator)
@ -73,7 +73,7 @@ class AmberPriceSensor(AmberSensor):
"""Return the current price in $/kWh."""
interval = self.coordinator.data[self.entity_description.key][self.channel_type]
if interval.channel_type == ChannelType.FEED_IN:
if interval.channel_type == ChannelType.FEEDIN:
return format_cents_to_dollars(interval.per_kwh) * -1
return format_cents_to_dollars(interval.per_kwh)
@ -87,9 +87,9 @@ class AmberPriceSensor(AmberSensor):
return data
data["duration"] = interval.duration
data["date"] = interval.date.isoformat()
data["date"] = interval.var_date.isoformat()
data["per_kwh"] = format_cents_to_dollars(interval.per_kwh)
if interval.channel_type == ChannelType.FEED_IN:
if interval.channel_type == ChannelType.FEEDIN:
data["per_kwh"] = data["per_kwh"] * -1
data["nem_date"] = interval.nem_time.isoformat()
data["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh)
@ -120,7 +120,7 @@ class AmberForecastSensor(AmberSensor):
return None
interval = intervals[0]
if interval.channel_type == ChannelType.FEED_IN:
if interval.channel_type == ChannelType.FEEDIN:
return format_cents_to_dollars(interval.per_kwh) * -1
return format_cents_to_dollars(interval.per_kwh)
@ -142,10 +142,10 @@ class AmberForecastSensor(AmberSensor):
for interval in intervals:
datum = {}
datum["duration"] = interval.duration
datum["date"] = interval.date.isoformat()
datum["date"] = interval.var_date.isoformat()
datum["nem_date"] = interval.nem_time.isoformat()
datum["per_kwh"] = format_cents_to_dollars(interval.per_kwh)
if interval.channel_type == ChannelType.FEED_IN:
if interval.channel_type == ChannelType.FEEDIN:
datum["per_kwh"] = datum["per_kwh"] * -1
datum["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh)
datum["start_time"] = interval.start_time.isoformat()

View File

@ -6,5 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/amcrest",
"iot_class": "local_polling",
"loggers": ["amcrest"],
"quality_scale": "legacy",
"requirements": ["amcrest==1.9.8"]
}

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/ampio",
"iot_class": "cloud_polling",
"loggers": ["asmog"],
"quality_scale": "legacy",
"requirements": ["asmog==0.0.6"]
}

View File

@ -16,7 +16,6 @@ from homeassistant.config_entries import (
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
OptionsFlowWithConfigEntry,
)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
@ -46,9 +45,11 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
def async_get_options_flow(
config_entry: ConfigEntry,
) -> HomeassistantAnalyticsOptionsFlowHandler:
"""Get the options flow for this handler."""
return HomeassistantAnalyticsOptionsFlowHandler(config_entry)
return HomeassistantAnalyticsOptionsFlowHandler()
async def async_step_user(
self, user_input: dict[str, Any] | None = None
@ -132,7 +133,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
)
class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry):
class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow):
"""Handle Homeassistant Analytics options."""
async def async_step_init(
@ -211,6 +212,6 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry):
),
},
),
self.options,
self.config_entry.options,
),
)

View File

@ -13,7 +13,7 @@ from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlowWithConfigEntry,
OptionsFlow,
)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_HOST, CONF_PORT
from homeassistant.core import callback
@ -186,16 +186,14 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
return OptionsFlowHandler(config_entry)
class OptionsFlowHandler(OptionsFlowWithConfigEntry):
class OptionsFlowHandler(OptionsFlow):
"""Handle an option flow for Android Debug Bridge."""
def __init__(self, config_entry: ConfigEntry) -> None:
"""Initialize options flow."""
super().__init__(config_entry)
self._apps: dict[str, Any] = self.options.setdefault(CONF_APPS, {})
self._state_det_rules: dict[str, Any] = self.options.setdefault(
CONF_STATE_DETECTION_RULES, {}
self._apps: dict[str, Any] = dict(config_entry.options.get(CONF_APPS, {}))
self._state_det_rules: dict[str, Any] = dict(
config_entry.options.get(CONF_STATE_DETECTION_RULES, {})
)
self._conf_app_id: str | None = None
self._conf_rule_id: str | None = None
@ -237,7 +235,7 @@ class OptionsFlowHandler(OptionsFlowWithConfigEntry):
SelectOptionDict(value=k, label=v) for k, v in apps_list.items()
]
rules = [RULES_NEW_ID, *self._state_det_rules]
options = self.options
options = self.config_entry.options
data_schema = vol.Schema(
{

View File

@ -9,7 +9,7 @@
"loggers": ["adb_shell", "androidtv", "pure_python_adb"],
"requirements": [
"adb-shell[async]==0.4.4",
"androidtv[async]==0.0.73",
"androidtv[async]==0.0.75",
"pure-python-adb[async]==0.3.0.dev0"
]
}

View File

@ -20,7 +20,7 @@ from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlowWithConfigEntry,
OptionsFlow,
)
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
from homeassistant.core import callback
@ -221,13 +221,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
return AndroidTVRemoteOptionsFlowHandler(config_entry)
class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithConfigEntry):
class AndroidTVRemoteOptionsFlowHandler(OptionsFlow):
"""Android TV Remote options flow."""
def __init__(self, config_entry: ConfigEntry) -> None:
"""Initialize options flow."""
super().__init__(config_entry)
self._apps: dict[str, Any] = self.options.setdefault(CONF_APPS, {})
self._apps: dict[str, Any] = dict(config_entry.options.get(CONF_APPS, {}))
self._conf_app_id: str | None = None
@callback

View File

@ -7,7 +7,6 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["androidtvremote2"],
"quality_scale": "platinum",
"requirements": ["androidtvremote2==0.1.2"],
"zeroconf": ["_androidtvremote2._tcp.local."]
}

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/anel_pwrctrl",
"iot_class": "local_polling",
"loggers": ["anel_pwrctrl"],
"quality_scale": "legacy",
"requirements": ["anel-pwrctrl-homeassistant==0.0.1.dev2"]
}

View File

@ -121,7 +121,6 @@ class AnthropicOptionsFlow(OptionsFlow):
def __init__(self, config_entry: ConfigEntry) -> None:
"""Initialize options flow."""
self.config_entry = config_entry
self.last_rendered_recommended = config_entry.options.get(
CONF_RECOMMENDED, False
)

View File

@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aosmith",
"iot_class": "cloud_polling",
"requirements": ["py-aosmith==1.0.10"]
"requirements": ["py-aosmith==1.0.11"]
}

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/apache_kafka",
"iot_class": "local_push",
"loggers": ["aiokafka", "kafka_python"],
"quality_scale": "legacy",
"requirements": ["aiokafka==0.10.0"]
}

View File

@ -6,6 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/apcupsd",
"iot_class": "local_polling",
"loggers": ["apcaccess"],
"quality_scale": "silver",
"requirements": ["aioapcaccess==0.4.2"]
}

View File

@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/apple_tv",
"iot_class": "local_push",
"loggers": ["pyatv", "srptools"],
"requirements": ["pyatv==0.15.1"],
"requirements": ["pyatv==0.16.0"],
"zeroconf": [
"_mediaremotetv._tcp.local.",
"_companion-link._tcp.local.",

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/apprise",
"iot_class": "cloud_push",
"loggers": ["apprise"],
"quality_scale": "legacy",
"requirements": ["apprise==1.9.0"]
}

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/aprs",
"iot_class": "cloud_push",
"loggers": ["aprslib", "geographiclib", "geopy"],
"quality_scale": "legacy",
"requirements": ["aprslib==0.7.2", "geopy==2.3.0"]
}

View File

@ -5,12 +5,17 @@ from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta
from APsystemsEZ1 import APsystemsEZ1M, ReturnAlarmInfo, ReturnOutputData
from APsystemsEZ1 import (
APsystemsEZ1M,
InverterReturnedError,
ReturnAlarmInfo,
ReturnOutputData,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import LOGGER
from .const import DOMAIN, LOGGER
@dataclass
@ -43,6 +48,11 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
self.api.min_power = device_info.minPower
async def _async_update_data(self) -> ApSystemsSensorData:
try:
output_data = await self.api.get_output_data()
alarm_info = await self.api.get_alarm_info()
except InverterReturnedError:
raise UpdateFailed(
translation_domain=DOMAIN, translation_key="inverter_error"
) from None
return ApSystemsSensorData(output_data=output_data, alarm_info=alarm_info)

View File
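The coordinator above now converts library errors into a translatable UpdateFailed. A condensed sketch of that pattern, assuming the "apsystems" domain and the inverter_error key added to strings.json in the next hunk; the fetch helper is illustrative:

from APsystemsEZ1 import APsystemsEZ1M, InverterReturnedError

from homeassistant.helpers.update_coordinator import UpdateFailed

DOMAIN = "apsystems"


async def fetch(api: APsystemsEZ1M):
    """Fetch output and alarm data, mapping library errors to a translated failure."""
    try:
        output_data = await api.get_output_data()
        alarm_info = await api.get_alarm_info()
    except InverterReturnedError:
        # Message text comes from exceptions.inverter_error in strings.json.
        raise UpdateFailed(
            translation_domain=DOMAIN, translation_key="inverter_error"
        ) from None
    return output_data, alarm_info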

@ -72,5 +72,10 @@
"name": "Inverter status"
}
}
},
"exceptions": {
"inverter_error": {
"message": "Inverter returned an error"
}
}
}

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/aqualogic",
"iot_class": "local_push",
"loggers": ["aqualogic"],
"quality_scale": "legacy",
"requirements": ["aqualogic==2.6"]
}

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/aquostv",
"iot_class": "local_polling",
"loggers": ["sharp_aquos_rc"],
"quality_scale": "legacy",
"requirements": ["sharp_aquos_rc==0.3.2"]
}

View File

@ -3,5 +3,6 @@
"name": "aREST",
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/arest",
"iot_class": "local_polling"
"iot_class": "local_polling",
"quality_scale": "legacy"
}

View File

@ -6,5 +6,6 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["arris_tg2492lg"],
"quality_scale": "legacy",
"requirements": ["arris-tg2492lg==2.2.0"]
}

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/aruba",
"iot_class": "local_polling",
"loggers": ["pexpect", "ptyprocess"],
"quality_scale": "legacy",
"requirements": ["pexpect==4.6.0"]
}

View File

@ -4,5 +4,6 @@
"codeowners": [],
"dependencies": ["mqtt"],
"documentation": "https://www.home-assistant.io/integrations/arwn",
"iot_class": "local_polling"
"iot_class": "local_polling",
"quality_scale": "legacy"
}

View File

@ -22,8 +22,8 @@ class EnhancedAudioChunk:
timestamp_ms: int
"""Timestamp relative to start of audio stream (milliseconds)"""
is_speech: bool | None
"""True if audio chunk likely contains speech, False if not, None if unknown"""
speech_probability: float | None
"""Probability that audio chunk contains speech (0-1), None if unknown"""
class AudioEnhancer(ABC):
@ -70,27 +70,27 @@ class MicroVadSpeexEnhancer(AudioEnhancer):
)
self.vad: MicroVad | None = None
self.threshold = 0.5
if self.is_vad_enabled:
self.vad = MicroVad()
_LOGGER.debug("Initialized microVAD with threshold=%s", self.threshold)
_LOGGER.debug("Initialized microVAD")
def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk:
"""Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples."""
is_speech: bool | None = None
speech_probability: float | None = None
assert len(audio) == BYTES_PER_CHUNK
if self.vad is not None:
# Run VAD
speech_prob = self.vad.Process10ms(audio)
is_speech = speech_prob > self.threshold
speech_probability = self.vad.Process10ms(audio)
if self.audio_processor is not None:
# Run noise suppression and auto gain
audio = self.audio_processor.Process10ms(audio).audio
return EnhancedAudioChunk(
audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech
audio=audio,
timestamp_ms=timestamp_ms,
speech_probability=speech_probability,
)

View File
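The enhancer now reports the raw microVAD probability instead of a pre-thresholded flag, leaving thresholding to the consumers. A small sketch of the new contract, using the default thresholds introduced in vad.py later in this diff; the probability value is illustrative:

# Probability reported by MicroVad.Process10ms() for one 10 ms chunk (illustrative value).
speech_probability: float | None = 0.72

BEFORE_COMMAND_THRESHOLD = 0.2  # VoiceCommandSegmenter.before_command_speech_threshold
IN_COMMAND_THRESHOLD = 0.5      # VoiceCommandSegmenter.in_command_speech_threshold

probability = speech_probability or 0.0  # None means no VAD ran
started_speaking = probability > BEFORE_COMMAND_THRESHOLD  # True
still_speaking = probability > IN_COMMAND_THRESHOLD        # True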

@ -31,6 +31,7 @@ from homeassistant.components.tts import (
)
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import intent
from homeassistant.helpers.collection import (
CHANGE_UPDATED,
CollectionError,
@ -109,6 +110,7 @@ PIPELINE_FIELDS: VolDictType = {
vol.Required("tts_voice"): vol.Any(str, None),
vol.Required("wake_word_entity"): vol.Any(str, None),
vol.Required("wake_word_id"): vol.Any(str, None),
vol.Optional("prefer_local_intents"): bool,
}
STORED_PIPELINE_RUNS = 10
@ -322,6 +324,7 @@ async def async_update_pipeline(
tts_voice: str | None | UndefinedType = UNDEFINED,
wake_word_entity: str | None | UndefinedType = UNDEFINED,
wake_word_id: str | None | UndefinedType = UNDEFINED,
prefer_local_intents: bool | UndefinedType = UNDEFINED,
) -> None:
"""Update a pipeline."""
pipeline_data: PipelineData = hass.data[DOMAIN]
@ -345,6 +348,7 @@ async def async_update_pipeline(
("tts_voice", tts_voice),
("wake_word_entity", wake_word_entity),
("wake_word_id", wake_word_id),
("prefer_local_intents", prefer_local_intents),
)
if val is not UNDEFINED
}
@ -398,6 +402,7 @@ class Pipeline:
tts_voice: str | None
wake_word_entity: str | None
wake_word_id: str | None
prefer_local_intents: bool = False
id: str = field(default_factory=ulid_util.ulid_now)
@ -421,6 +426,7 @@ class Pipeline:
tts_voice=data["tts_voice"],
wake_word_entity=data["wake_word_entity"],
wake_word_id=data["wake_word_id"],
prefer_local_intents=data.get("prefer_local_intents", False),
)
def to_json(self) -> dict[str, Any]:
@ -438,6 +444,7 @@ class Pipeline:
"tts_voice": self.tts_voice,
"wake_word_entity": self.wake_word_entity,
"wake_word_id": self.wake_word_id,
"prefer_local_intents": self.prefer_local_intents,
}
@ -780,7 +787,9 @@ class PipelineRun:
# speaking the voice command.
audio_chunks_for_stt.extend(
EnhancedAudioChunk(
audio=chunk_ts[0], timestamp_ms=chunk_ts[1], is_speech=False
audio=chunk_ts[0],
timestamp_ms=chunk_ts[1],
speech_probability=None,
)
for chunk_ts in result.queued_audio
)
@ -827,7 +836,7 @@ class PipelineRun:
if wake_word_vad is not None:
chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate
if not wake_word_vad.process(chunk_seconds, chunk.is_speech):
if not wake_word_vad.process(chunk_seconds, chunk.speech_probability):
raise WakeWordTimeoutError(
code="wake-word-timeout", message="Wake word was not detected"
)
@ -955,7 +964,7 @@ class PipelineRun:
if stt_vad is not None:
chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate
if not stt_vad.process(chunk_seconds, chunk.is_speech):
if not stt_vad.process(chunk_seconds, chunk.speech_probability):
# Silence detected at the end of voice command
self.process_event(
PipelineEvent(
@ -1009,20 +1018,64 @@ class PipelineRun:
"intent_input": intent_input,
"conversation_id": conversation_id,
"device_id": device_id,
"prefer_local_intents": self.pipeline.prefer_local_intents,
},
)
)
try:
conversation_result = await conversation.async_converse(
hass=self.hass,
user_input = conversation.ConversationInput(
text=intent_input,
context=self.context,
conversation_id=conversation_id,
device_id=device_id,
context=self.context,
language=self.pipeline.conversation_language,
language=self.pipeline.language,
agent_id=self.intent_agent,
)
processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT
conversation_result: conversation.ConversationResult | None = None
if user_input.agent_id != conversation.HOME_ASSISTANT_AGENT:
# Sentence triggers override conversation agent
if (
trigger_response_text
:= await conversation.async_handle_sentence_triggers(
self.hass, user_input
)
) is not None:
# Sentence trigger matched
trigger_response = intent.IntentResponse(
self.pipeline.conversation_language
)
trigger_response.async_set_speech(trigger_response_text)
conversation_result = conversation.ConversationResult(
response=trigger_response,
conversation_id=user_input.conversation_id,
)
# Try local intents first, if preferred.
elif self.pipeline.prefer_local_intents and (
intent_response := await conversation.async_handle_intents(
self.hass, user_input
)
):
# Local intent matched
conversation_result = conversation.ConversationResult(
response=intent_response,
conversation_id=user_input.conversation_id,
)
processed_locally = True
if conversation_result is None:
# Fall back to pipeline conversation agent
conversation_result = await conversation.async_converse(
hass=self.hass,
text=user_input.text,
conversation_id=user_input.conversation_id,
device_id=user_input.device_id,
context=user_input.context,
language=user_input.language,
agent_id=user_input.agent_id,
)
except Exception as src_error:
_LOGGER.exception("Unexpected error during intent recognition")
raise IntentRecognitionError(
@ -1035,7 +1088,10 @@ class PipelineRun:
self.process_event(
PipelineEvent(
PipelineEventType.INTENT_END,
{"intent_output": conversation_result.as_dict()},
{
"processed_locally": processed_locally,
"intent_output": conversation_result.as_dict(),
},
)
)
@ -1221,7 +1277,7 @@ class PipelineRun:
yield EnhancedAudioChunk(
audio=sub_chunk,
timestamp_ms=timestamp_ms,
is_speech=None, # no VAD
speech_probability=None, # no VAD
)
timestamp_ms += MS_PER_CHUNK

View File
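The new prefer_local_intents flag can be toggled through the async_update_pipeline helper extended above. A minimal sketch, assuming a running hass instance and an existing Pipeline object; the import path mirrors the pipeline module shown in this hunk:

from homeassistant.components.assist_pipeline.pipeline import (
    Pipeline,
    async_update_pipeline,
)
from homeassistant.core import HomeAssistant


async def prefer_local(hass: HomeAssistant, pipeline: Pipeline) -> None:
    """Opt an existing pipeline into local-first intent matching."""
    # When enabled, local intent matching is attempted before falling back to
    # the pipeline's conversation agent.
    await async_update_pipeline(hass, pipeline, prefer_local_intents=True)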

@ -75,7 +75,7 @@ class AudioBuffer:
class VoiceCommandSegmenter:
"""Segments an audio stream into voice commands."""
speech_seconds: float = 0.3
speech_seconds: float = 0.1
"""Seconds of speech before voice command has started."""
command_seconds: float = 1.0
@ -96,6 +96,12 @@ class VoiceCommandSegmenter:
timed_out: bool = False
"""True if a timeout occurred during voice command."""
before_command_speech_threshold: float = 0.2
"""Probability threshold for speech before voice command."""
in_command_speech_threshold: float = 0.5
"""Probability threshold for speech during voice command."""
_speech_seconds_left: float = 0.0
"""Seconds left before considering voice command as started."""
@ -124,7 +130,7 @@ class VoiceCommandSegmenter:
self._reset_seconds_left = self.reset_seconds
self.in_command = False
def process(self, chunk_seconds: float, is_speech: bool | None) -> bool:
def process(self, chunk_seconds: float, speech_probability: float | None) -> bool:
"""Process samples using external VAD.
Returns False when command is done.
@ -142,7 +148,12 @@ class VoiceCommandSegmenter:
self.timed_out = True
return False
if speech_probability is None:
speech_probability = 0.0
if not self.in_command:
# Before command
is_speech = speech_probability > self.before_command_speech_threshold
if is_speech:
self._reset_seconds_left = self.reset_seconds
self._speech_seconds_left -= chunk_seconds
@ -160,12 +171,17 @@ class VoiceCommandSegmenter:
if self._reset_seconds_left <= 0:
self._speech_seconds_left = self.speech_seconds
self._reset_seconds_left = self.reset_seconds
elif not is_speech:
else:
# In command
is_speech = speech_probability > self.in_command_speech_threshold
if not is_speech:
# Silence in command
self._reset_seconds_left = self.reset_seconds
self._silence_seconds_left -= chunk_seconds
self._command_seconds_left -= chunk_seconds
if (self._silence_seconds_left <= 0) and (self._command_seconds_left <= 0):
if (self._silence_seconds_left <= 0) and (
self._command_seconds_left <= 0
):
# Command finished successfully
self.reset()
_LOGGER.debug("Voice command finished")
@ -226,6 +242,9 @@ class VoiceActivityTimeout:
reset_seconds: float = 0.5
"""Seconds of speech before resetting timeout."""
speech_threshold: float = 0.5
"""Threshold for speech."""
_silence_seconds_left: float = 0.0
"""Seconds left before considering voice command as stopped."""
@ -241,12 +260,15 @@ class VoiceActivityTimeout:
self._silence_seconds_left = self.silence_seconds
self._reset_seconds_left = self.reset_seconds
def process(self, chunk_seconds: float, is_speech: bool | None) -> bool:
def process(self, chunk_seconds: float, speech_probability: float | None) -> bool:
"""Process samples using external VAD.
Returns False when timeout is reached.
"""
if is_speech:
if speech_probability is None:
speech_probability = 0.0
if speech_probability > self.speech_threshold:
# Speech
self._reset_seconds_left -= chunk_seconds
if self._reset_seconds_left <= 0:

View File

@ -4,5 +4,6 @@
"codeowners": ["@mtdcr"],
"documentation": "https://www.home-assistant.io/integrations/aten_pe",
"iot_class": "local_polling",
"quality_scale": "legacy",
"requirements": ["atenpdu==0.3.2"]
}

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/atome",
"iot_class": "cloud_polling",
"loggers": ["pyatome"],
"quality_scale": "legacy",
"requirements": ["pyAtome==0.1.1"]
}

View File

@ -28,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.0"]
"requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.1"]
}

View File

@ -22,13 +22,14 @@ class AussieBroadbandConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1
_reauth_username: str
def __init__(self) -> None:
"""Initialize the config flow."""
self.data: dict = {}
self.options: dict = {CONF_SERVICES: []}
self.services: list[dict[str, Any]] = []
self.client: AussieBB | None = None
self._reauth_username: str | None = None
async def async_auth(self, user_input: dict[str, str]) -> dict[str, str] | None:
"""Reusable Auth Helper."""
@ -92,7 +93,7 @@ class AussieBroadbandConfigFlow(ConfigFlow, domain=DOMAIN):
errors: dict[str, str] | None = None
if user_input and self._reauth_username:
if user_input:
data = {
CONF_USERNAME: self._reauth_username,
CONF_PASSWORD: user_input[CONF_PASSWORD],

View File

@ -0,0 +1,99 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
This integration does not provide additional actions.
appropriate-polling: done
brands: done
common-modules:
status: todo
comment: |
The entity.py file is not used in this integration.
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
This integration does not provide additional actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
This integration does not have an options flow.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates:
status: exempt
comment: |
This integration only polls data using a coordinator.
Since the integration is read-only and poll-only (it only provides sensor
data), there is no need to implement parallel updates.
reauthentication-flow: todo
test-coverage: done
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: |
This integration cannot be discovered; it connects to a service
provider, which uses the user's home address to get the data.
discovery:
status: exempt
comment: |
This integration cannot be discovered; it connects to a service
provider, which uses the user's home address to get the data.
docs-data-update: done
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices:
status: exempt
comment: |
This is a service, which doesn't integrate with any devices.
docs-supported-functions: done
docs-troubleshooting: todo
docs-use-cases: done
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: |
This integration does not have any entities that should be disabled by default.
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |
This integration doesn't have any cases where raising an issue is needed.
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/avea",
"iot_class": "local_polling",
"loggers": ["avea"],
"quality_scale": "legacy",
"requirements": ["avea==1.5.1"]
}

View File

@ -4,5 +4,6 @@
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/avion",
"iot_class": "assumed_state",
"quality_scale": "legacy",
"requirements": ["avion==0.10"]
}

View File

@ -14,7 +14,4 @@ class AWSFlowHandler(ConfigFlow, domain=DOMAIN):
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
"""Import a config entry."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
return self.async_create_entry(title="configuration.yaml", data=import_data)

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/aws",
"iot_class": "cloud_push",
"loggers": ["aiobotocore", "botocore"],
"quality_scale": "legacy",
"requirements": ["aiobotocore==2.13.1", "botocore==1.34.131"]
}

View File

@ -18,7 +18,7 @@ from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlowWithConfigEntry,
OptionsFlow,
)
from homeassistant.const import (
CONF_HOST,
@ -59,9 +59,11 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> AxisOptionsFlowHandler:
def async_get_options_flow(
config_entry: ConfigEntry,
) -> AxisOptionsFlowHandler:
"""Get the options flow for this handler."""
return AxisOptionsFlowHandler(config_entry)
return AxisOptionsFlowHandler()
def __init__(self) -> None:
"""Initialize the Axis config flow."""
@ -264,7 +266,7 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
return await self.async_step_user()
class AxisOptionsFlowHandler(OptionsFlowWithConfigEntry):
class AxisOptionsFlowHandler(OptionsFlow):
"""Handle Axis device options."""
config_entry: AxisConfigEntry
@ -282,8 +284,7 @@ class AxisOptionsFlowHandler(OptionsFlowWithConfigEntry):
) -> ConfigFlowResult:
"""Manage the Axis device stream options."""
if user_input is not None:
self.options.update(user_input)
return self.async_create_entry(title="", data=self.options)
return self.async_create_entry(data=self.config_entry.options | user_input)
schema = {}

View File

@ -29,7 +29,6 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["axis"],
"quality_scale": "platinum",
"requirements": ["axis==63"],
"ssdp": [
{

Some files were not shown because too many files have changed in this diff