Bram Kragten 2024-09-04 17:41:08 +02:00 committed by GitHub
commit 36ec1b33fe
2390 changed files with 69301 additions and 51684 deletions

View File

@ -61,6 +61,7 @@ components: &components
- homeassistant/components/auth/**
- homeassistant/components/automation/**
- homeassistant/components/backup/**
- homeassistant/components/blueprint/**
- homeassistant/components/bluetooth/**
- homeassistant/components/cloud/**
- homeassistant/components/config/**
@ -146,6 +147,7 @@ requirements: &requirements
- homeassistant/package_constraints.txt
- requirements*.txt
- pyproject.toml
- script/licenses.py
any:
- *base_platforms

View File

@ -69,7 +69,7 @@ jobs:
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
- name: Upload translations
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: translations
path: translations.tar.gz
@ -197,7 +197,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2024.03.5
uses: home-assistant/builder@2024.08.2
with:
args: |
$BUILD_ARGS \
@ -263,7 +263,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2024.03.5
uses: home-assistant/builder@2024.08.2
with:
args: |
$BUILD_ARGS \
@ -323,7 +323,7 @@ jobs:
uses: actions/checkout@v4.1.7
- name: Install Cosign
uses: sigstore/cosign-installer@v3.5.0
uses: sigstore/cosign-installer@v3.6.0
with:
cosign-release: "v2.2.3"
@ -482,3 +482,56 @@ jobs:
export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"
twine upload dist/* --skip-existing
hassfest-image:
name: Build and test hassfest image
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
attestations: write
id-token: write
needs: ["init"]
if: github.repository_owner == 'home-assistant'
env:
HASSFEST_IMAGE_NAME: ghcr.io/home-assistant/hassfest
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Login to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
with:
context: . # So the action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
load: true
tags: ${{ env.HASSFEST_IMAGE_TAG }}
- name: Run hassfest against core
run: docker run --rm -v ${{ github.workspace }}/homeassistant:/github/workspace/homeassistant ${{ env.HASSFEST_IMAGE_TAG }} --core-integrations-path=/github/workspace/homeassistant/components
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
with:
context: . # So the action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
push: true
tags: ${{ env.HASSFEST_IMAGE_TAG }},${{ env.HASSFEST_IMAGE_NAME }}:latest
- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@6149ea5740be74af77f260b9db67e633f6b0a9a1 # v1.4.2
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
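
The new hassfest-image job builds the hassfest container and exercises it against core before publishing. A local equivalent of the "Run hassfest against core" step, assuming Docker is available and the image tag has been pulled or built locally, could look like this sketch (not part of the workflow):

    import subprocess
    from pathlib import Path

    workspace = Path.cwd()  # a Home Assistant core checkout
    image = "ghcr.io/home-assistant/hassfest:latest"

    # Mirrors the docker run invocation in the workflow step above.
    subprocess.run(
        [
            "docker", "run", "--rm",
            "-v", f"{workspace}/homeassistant:/github/workspace/homeassistant",
            image,
            "--core-integrations-path=/github/workspace/homeassistant/components",
        ],
        check=True,
    )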

View File

@ -31,12 +31,16 @@ on:
description: "Only run mypy"
default: false
type: boolean
audit-licenses-only:
description: "Only run audit licenses"
default: false
type: boolean
env:
CACHE_VERSION: 9
CACHE_VERSION: 10
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 8
HA_SHORT_VERSION: "2024.8"
HA_SHORT_VERSION: "2024.9"
DEFAULT_PYTHON: "3.12"
ALL_PYTHON_VERSIONS: "['3.12']"
# 10.3 is the oldest supported version
@ -222,6 +226,7 @@ jobs:
if: |
github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
needs:
- info
steps:
@ -343,6 +348,7 @@ jobs:
pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github
lint-other:
name: Check other linters
runs-on: ubuntu-24.04
@ -508,8 +514,7 @@ jobs:
uv pip install -U "pip>=21.3.1" setuptools wheel
uv pip install -r requirements.txt
python -m script.gen_requirements_all ci
uv pip install -r requirements_all_pytest.txt
uv pip install -r requirements_test.txt
uv pip install -r requirements_all_pytest.txt -r requirements_test.txt
uv pip install -e . --config-settings editable_mode=compat
hassfest:
@ -518,6 +523,7 @@ jobs:
if: |
github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
needs:
- info
- base
@ -556,6 +562,7 @@ jobs:
if: |
github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
needs:
- info
- base
@ -589,7 +596,10 @@ jobs:
- info
- base
if: |
needs.info.outputs.requirements == 'true'
(github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
|| github.event.inputs.audit-licenses-only == 'true')
&& needs.info.outputs.requirements == 'true'
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
@ -613,7 +623,7 @@ jobs:
. venv/bin/activate
pip-licenses --format=json --output-file=licenses.json
- name: Upload licenses
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: licenses
path: licenses.json
@ -628,6 +638,7 @@ jobs:
timeout-minutes: 20
if: |
github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
|| github.event.inputs.pylint-only == 'true'
needs:
- info
@ -672,7 +683,9 @@ jobs:
runs-on: ubuntu-24.04
timeout-minutes: 20
if: |
(github.event.inputs.mypy-only != 'true' || github.event.inputs.pylint-only == 'true')
(github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
|| github.event.inputs.pylint-only == 'true')
&& (needs.info.outputs.tests_glob || needs.info.outputs.test_full_suite == 'true')
needs:
- info
@ -703,20 +716,21 @@ jobs:
run: |
. venv/bin/activate
python --version
pylint --ignore-missing-annotations=y tests
pylint tests
- name: Run pylint (partially)
if: needs.info.outputs.test_full_suite == 'false'
shell: bash
run: |
. venv/bin/activate
python --version
pylint --ignore-missing-annotations=y tests/components/${{ needs.info.outputs.tests_glob }}
pylint tests/components/${{ needs.info.outputs.tests_glob }}
mypy:
name: Check mypy
runs-on: ubuntu-24.04
if: |
github.event.inputs.pylint-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
|| github.event.inputs.mypy-only == 'true'
needs:
- info
@ -781,6 +795,7 @@ jobs:
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
&& needs.info.outputs.test_full_suite == 'true'
needs:
- info
@ -818,7 +833,7 @@ jobs:
. venv/bin/activate
python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
- name: Upload pytest_buckets
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: pytest_buckets
path: pytest_buckets.txt
@ -831,6 +846,7 @@ jobs:
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
&& needs.info.outputs.test_full_suite == 'true'
needs:
- info
@ -918,14 +934,14 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
@ -951,6 +967,7 @@ jobs:
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
&& needs.info.outputs.mariadb_groups != '[]'
needs:
- info
@ -1043,7 +1060,7 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}
@ -1051,7 +1068,7 @@ jobs:
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: coverage-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}
@ -1076,6 +1093,7 @@ jobs:
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
&& needs.info.outputs.postgresql_groups != '[]'
needs:
- info
@ -1169,7 +1187,7 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}
@ -1177,7 +1195,7 @@ jobs:
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: coverage-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}
@ -1220,6 +1238,7 @@ jobs:
&& github.event.inputs.lint-only != 'true'
&& github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
&& needs.info.outputs.tests_glob
&& needs.info.outputs.test_full_suite == 'false'
needs:
@ -1310,14 +1329,14 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
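
Throughout this workflow the new audit-licenses-only input is added to the job conditions so that a licenses-only run skips linting, typing and tests, while the licenses job itself gets a compound if: expression. Modelled in Python (where and binds tighter than or, matching the workflow expression), that gate behaves like this sketch:

    def licenses_job_runs(
        pylint_only: bool,
        mypy_only: bool,
        audit_licenses_only: bool,
        requirements_changed: bool,
    ) -> bool:
        # Runs on a normal CI pass (neither pylint-only nor mypy-only) or when
        # audit-licenses-only is requested, but only if requirements changed.
        return (
            (not pylint_only and not mypy_only or audit_licenses_only)
            and requirements_changed
        )

    assert licenses_job_runs(False, False, False, True)      # normal run
    assert licenses_job_runs(False, False, True, True)       # licenses-only run
    assert not licenses_job_runs(True, False, False, True)   # pylint-only run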

View File

@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.1.7
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.25.15
uses: github/codeql-action/init@v3.26.5
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.25.15
uses: github/codeql-action/analyze@v3.26.5
with:
category: "/language:python"

View File

@ -82,14 +82,14 @@ jobs:
) > .env_file
- name: Upload env_file
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: env_file
path: ./.env_file
overwrite: true
- name: Upload requirements_diff
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: requirements_diff
path: ./requirements_diff.txt
@ -101,7 +101,7 @@ jobs:
python -m script.gen_requirements_all ci
- name: Upload requirements_all_wheels
uses: actions/upload-artifact@v4.3.4
uses: actions/upload-artifact@v4.3.6
with:
name: requirements_all_wheels
path: ./requirements_all_wheels_*.txt
@ -211,7 +211,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_old-cython.txt"
@ -226,7 +226,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtaa"
@ -240,7 +240,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtab"
@ -254,7 +254,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtac"

View File

@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.5.5
rev: v0.6.2
hooks:
- id: ruff
args:
@ -12,7 +12,7 @@ repos:
hooks:
- id: codespell
args:
- --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn,pres,ser,ue
- --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
- --skip="./.*,*.csv,*.json,*.ambr"
- --quiet-level=2
exclude_types: [csv, json, html]

View File

@ -95,8 +95,6 @@ homeassistant.components.aruba.*
homeassistant.components.arwn.*
homeassistant.components.aseko_pool_live.*
homeassistant.components.assist_pipeline.*
homeassistant.components.asterisk_cdr.*
homeassistant.components.asterisk_mbox.*
homeassistant.components.asuswrt.*
homeassistant.components.autarco.*
homeassistant.components.auth.*
@ -198,7 +196,9 @@ homeassistant.components.fritzbox.*
homeassistant.components.fritzbox_callmonitor.*
homeassistant.components.fronius.*
homeassistant.components.frontend.*
homeassistant.components.fujitsu_fglair.*
homeassistant.components.fully_kiosk.*
homeassistant.components.fyta.*
homeassistant.components.generic_hygrostat.*
homeassistant.components.generic_thermostat.*
homeassistant.components.geo_location.*
@ -294,7 +294,7 @@ homeassistant.components.london_underground.*
homeassistant.components.lookin.*
homeassistant.components.luftdaten.*
homeassistant.components.madvr.*
homeassistant.components.mailbox.*
homeassistant.components.manual.*
homeassistant.components.map.*
homeassistant.components.mastodon.*
homeassistant.components.matrix.*

View File

@ -108,6 +108,8 @@ build.json @home-assistant/supervisor
/tests/components/anova/ @Lash-L
/homeassistant/components/anthemav/ @hyralex
/tests/components/anthemav/ @hyralex
/homeassistant/components/anthropic/ @Shulyaka
/tests/components/anthropic/ @Shulyaka
/homeassistant/components/aosmith/ @bdr99
/tests/components/aosmith/ @bdr99
/homeassistant/components/apache_kafka/ @bachya
@ -347,8 +349,8 @@ build.json @home-assistant/supervisor
/tests/components/dremel_3d_printer/ @tkdrob
/homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer
/tests/components/drop_connect/ @ChandlerSystems @pfrazer
/homeassistant/components/dsmr/ @Robbie1221 @frenck
/tests/components/dsmr/ @Robbie1221 @frenck
/homeassistant/components/dsmr/ @Robbie1221
/tests/components/dsmr/ @Robbie1221
/homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/homeassistant/components/duotecno/ @cereal2nd
@ -431,6 +433,7 @@ build.json @home-assistant/supervisor
/homeassistant/components/evil_genius_labs/ @balloob
/tests/components/evil_genius_labs/ @balloob
/homeassistant/components/evohome/ @zxdavb
/tests/components/evohome/ @zxdavb
/homeassistant/components/ezviz/ @RenierM26 @baqs
/tests/components/ezviz/ @RenierM26 @baqs
/homeassistant/components/faa_delays/ @ntilley905
@ -496,6 +499,8 @@ build.json @home-assistant/supervisor
/tests/components/frontend/ @home-assistant/frontend
/homeassistant/components/frontier_silicon/ @wlcrs
/tests/components/frontier_silicon/ @wlcrs
/homeassistant/components/fujitsu_fglair/ @crevetor
/tests/components/fujitsu_fglair/ @crevetor
/homeassistant/components/fully_kiosk/ @cgarwood
/tests/components/fully_kiosk/ @cgarwood
/homeassistant/components/fyta/ @dontinelli
@ -823,8 +828,6 @@ build.json @home-assistant/supervisor
/tests/components/logbook/ @home-assistant/core
/homeassistant/components/logger/ @home-assistant/core
/tests/components/logger/ @home-assistant/core
/homeassistant/components/logi_circle/ @evanjd
/tests/components/logi_circle/ @evanjd
/homeassistant/components/london_underground/ @jpbede
/tests/components/london_underground/ @jpbede
/homeassistant/components/lookin/ @ANMalko @bdraco
@ -967,6 +970,8 @@ build.json @home-assistant/supervisor
/tests/components/nfandroidtv/ @tkdrob
/homeassistant/components/nibe_heatpump/ @elupus
/tests/components/nibe_heatpump/ @elupus
/homeassistant/components/nice_go/ @IceBotYT
/tests/components/nice_go/ @IceBotYT
/homeassistant/components/nightscout/ @marciogranzotto
/tests/components/nightscout/ @marciogranzotto
/homeassistant/components/nilu/ @hfurubotten
@ -1324,6 +1329,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/smarty/ @z0mbieprocess
/homeassistant/components/smhi/ @gjohansson-ST
/tests/components/smhi/ @gjohansson-ST
/homeassistant/components/smlight/ @tl-sl
/tests/components/smlight/ @tl-sl
/homeassistant/components/sms/ @ocalvo
/tests/components/sms/ @ocalvo
/homeassistant/components/snapcast/ @luar123
@ -1486,6 +1493,8 @@ build.json @home-assistant/supervisor
/tests/components/tomorrowio/ @raman325 @lymanepp
/homeassistant/components/totalconnect/ @austinmroczek
/tests/components/totalconnect/ @austinmroczek
/homeassistant/components/touchline_sl/ @jnsgruk
/tests/components/touchline_sl/ @jnsgruk
/homeassistant/components/tplink/ @rytilahti @bdraco @sdb9696
/tests/components/tplink/ @rytilahti @bdraco @sdb9696
/homeassistant/components/tplink_omada/ @MarkGodwin
@ -1651,6 +1660,8 @@ build.json @home-assistant/supervisor
/tests/components/xiaomi_miio/ @rytilahti @syssi @starkillerOG
/homeassistant/components/xiaomi_tv/ @simse
/homeassistant/components/xmpp/ @fabaff @flowolf
/homeassistant/components/yale/ @bdraco
/tests/components/yale/ @bdraco
/homeassistant/components/yale_smart_alarm/ @gjohansson-ST
/tests/components/yale_smart_alarm/ @gjohansson-ST
/homeassistant/components/yalexs_ble/ @bdraco

View File

@ -42,7 +42,8 @@ WORKDIR /usr/src
# Setup hass-release
RUN git clone --depth 1 https://github.com/home-assistant/hass-release \
&& uv pip install --system -e hass-release/
&& uv pip install --system -e hass-release/ \
&& chown -R vscode /usr/src/hass-release/data
USER vscode
ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"

View File

@ -8,6 +8,8 @@ import glob
from http.client import HTTPConnection
import importlib
import os
from pathlib import Path
from ssl import SSLContext
import sys
import threading
import time
@ -143,6 +145,78 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=SSLContext.load_default_certs,
object=SSLContext,
function="load_default_certs",
check_allowed=None,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=SSLContext.load_verify_locations,
object=SSLContext,
function="load_verify_locations",
check_allowed=None,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=SSLContext.load_cert_chain,
object=SSLContext,
function="load_cert_chain",
check_allowed=None,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=Path.open,
object=Path,
function="open",
check_allowed=_check_file_allowed,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=Path.read_text,
object=Path,
function="read_text",
check_allowed=_check_file_allowed,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=Path.read_bytes,
object=Path,
function="read_bytes",
check_allowed=_check_file_allowed,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=Path.write_text,
object=Path,
function="write_text",
check_allowed=_check_file_allowed,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=Path.write_bytes,
object=Path,
function="write_bytes",
check_allowed=_check_file_allowed,
strict=False,
strict_core=False,
skip_for_tests=True,
),
)
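
These additions extend the blocking-call checks to SSLContext certificate loading and common pathlib.Path I/O helpers. The mechanism is wrapping the original callable and complaining when it is hit from the event loop thread; a minimal standalone sketch of that idea for Path.open (not Home Assistant's actual implementation):

    import asyncio
    from pathlib import Path

    _original_open = Path.open

    def _checked_open(self, *args, **kwargs):
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            pass  # Called from a worker thread; blocking here is fine.
        else:
            print(f"Blocking Path.open() detected inside the event loop: {self}")
        return _original_open(self, *args, **kwargs)

    Path.open = _checked_open  # patch applied once at startup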

View File

@ -586,10 +586,10 @@ async def async_enable_logging(
logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
logging.getLogger("httpx").setLevel(logging.WARNING)
sys.excepthook = lambda *args: logging.getLogger(None).exception(
sys.excepthook = lambda *args: logging.getLogger().exception(
"Uncaught exception", exc_info=args
)
threading.excepthook = lambda args: logging.getLogger(None).exception(
threading.excepthook = lambda args: logging.getLogger().exception(
"Uncaught thread exception",
exc_info=( # type: ignore[arg-type]
args.exc_type,
@ -616,10 +616,9 @@ async def async_enable_logging(
_create_log_file, err_log_path, log_rotate_days
)
err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
logger = logging.getLogger("")
logger = logging.getLogger()
logger.addHandler(err_handler)
logger.setLevel(logging.INFO if verbose else logging.WARNING)
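
Both excepthook lambdas and the root-logger lookup drop the explicit None / "" argument; all three spellings return the same root logger, so behaviour is unchanged:

    import logging

    # None, the empty string, and no argument all resolve to the root logger.
    assert logging.getLogger(None) is logging.getLogger("") is logging.getLogger()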

View File

@ -1,5 +0,0 @@
{
"domain": "asterisk",
"name": "Asterisk",
"integrations": ["asterisk_cdr", "asterisk_mbox"]
}

View File

@ -0,0 +1,5 @@
{
"domain": "fujitsu",
"name": "Fujitsu",
"integrations": ["fujitsu_anywair", "fujitsu_fglair"]
}

View File

@ -0,0 +1,5 @@
{
"domain": "roth",
"name": "Roth",
"integrations": ["touchline", "touchline_sl"]
}

View File

@ -1,5 +1,11 @@
{
"domain": "yale",
"name": "Yale",
"integrations": ["august", "yale_smart_alarm", "yalexs_ble", "yale_home"]
"integrations": [
"august",
"yale_smart_alarm",
"yalexs_ble",
"yale_home",
"yale"
]
}

View File

@ -7,8 +7,14 @@
}
},
"services": {
"capture_image": "mdi:camera",
"change_setting": "mdi:cog",
"trigger_automation": "mdi:play"
"capture_image": {
"service": "mdi:camera"
},
"change_setting": {
"service": "mdi:cog"
},
"trigger_automation": {
"service": "mdi:play"
}
}
}

View File

@ -81,7 +81,7 @@ class AcerSwitch(SwitchEntity):
write_timeout: int,
) -> None:
"""Init of the Acer projector."""
self.ser = serial.Serial(
self.serial = serial.Serial(
port=serial_port, timeout=timeout, write_timeout=write_timeout
)
self._serial_port = serial_port
@ -99,16 +99,16 @@ class AcerSwitch(SwitchEntity):
# was disconnected during runtime.
# This way the projector can be reconnected and will still work
try:
if not self.ser.is_open:
self.ser.open()
self.ser.write(msg.encode("utf-8"))
if not self.serial.is_open:
self.serial.open()
self.serial.write(msg.encode("utf-8"))
# Size is an empirical value; there is no real limit.
# AFAIK there is no limit and no end character, so we will usually
# need to wait for the timeout
ret = self.ser.read_until(size=20).decode("utf-8")
ret = self.serial.read_until(size=20).decode("utf-8")
except serial.SerialException:
_LOGGER.error("Problem communicating with %s", self._serial_port)
self.ser.close()
self.serial.close()
return ret
def _write_read_format(self, msg: str) -> str:

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/acmeda",
"iot_class": "local_push",
"loggers": ["aiopulse"],
"requirements": ["aiopulse==0.4.4"]
"requirements": ["aiopulse==0.4.6"]
}

View File

@ -66,10 +66,20 @@
}
},
"services": {
"add_url": "mdi:link-plus",
"remove_url": "mdi:link-off",
"enable_url": "mdi:link-variant",
"disable_url": "mdi:link-variant-off",
"refresh": "mdi:refresh"
"add_url": {
"service": "mdi:link-plus"
},
"remove_url": {
"service": "mdi:link-off"
},
"enable_url": {
"service": "mdi:link-variant"
},
"disable_url": {
"service": "mdi:link-variant-off"
},
"refresh": {
"service": "mdi:refresh"
}
}
}

View File

@ -136,7 +136,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
# Tuple to hold data needed for notification
NotificationItem = namedtuple(
NotificationItem = namedtuple( # noqa: PYI024
"NotificationItem", "hnotify huser name plc_datatype callback"
)
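
The # noqa silences ruff's PYI024 rule, which flags collections.namedtuple in favour of typing.NamedTuple. For comparison, the class-based form the rule points toward would look roughly like this (field types are guesses for illustration, since the runtime tuple is untyped):

    from collections.abc import Callable
    from typing import Any, NamedTuple

    class NotificationItem(NamedTuple):
        hnotify: Any
        huser: Any
        name: str
        plc_datatype: Any
        callback: Callable[..., Any]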

View File

@ -1,5 +1,7 @@
{
"services": {
"write_data_by_name": "mdi:pencil"
"write_data_by_name": {
"service": "mdi:pencil"
}
}
}

View File

@ -1,5 +1,7 @@
{
"services": {
"set_time_to": "mdi:timer-cog"
"set_time_to": {
"service": "mdi:timer-cog"
}
}
}

View File

@ -7,7 +7,11 @@
}
},
"services": {
"add_tracking": "mdi:package-variant-plus",
"remove_tracking": "mdi:package-variant-minus"
"add_tracking": {
"service": "mdi:package-variant-plus"
},
"remove_tracking": {
"service": "mdi:package-variant-minus"
}
}
}

View File

@ -59,7 +59,7 @@ async def async_setup_entry(
platform = async_get_current_platform()
for service, method in CAMERA_SERVICES.items():
platform.async_register_entity_service(service, {}, method)
platform.async_register_entity_service(service, None, method)
class AgentCamera(MjpegCamera):

View File

@ -1,9 +1,19 @@
{
"services": {
"start_recording": "mdi:record-rec",
"stop_recording": "mdi:stop",
"enable_alerts": "mdi:bell-alert",
"disable_alerts": "mdi:bell-off",
"snapshot": "mdi:camera"
"start_recording": {
"service": "mdi:record-rec"
},
"stop_recording": {
"service": "mdi:stop"
},
"enable_alerts": {
"service": "mdi:bell-alert"
},
"disable_alerts": {
"service": "mdi:bell-off"
},
"snapshot": {
"service": "mdi:camera"
}
}
}

View File

@ -21,6 +21,7 @@ PLATFORMS: list[Platform] = [
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,
Platform.UPDATE,
]

View File

@ -0,0 +1,55 @@
"""Airgradient Update platform."""
from datetime import timedelta
from functools import cached_property
from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import AirGradientConfigEntry, AirGradientMeasurementCoordinator
from .entity import AirGradientEntity
SCAN_INTERVAL = timedelta(hours=1)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: AirGradientConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up Airgradient update platform."""
data = config_entry.runtime_data
async_add_entities([AirGradientUpdate(data.measurement)], True)
class AirGradientUpdate(AirGradientEntity, UpdateEntity):
"""Representation of Airgradient Update."""
_attr_device_class = UpdateDeviceClass.FIRMWARE
coordinator: AirGradientMeasurementCoordinator
def __init__(self, coordinator: AirGradientMeasurementCoordinator) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self._attr_unique_id = f"{coordinator.serial_number}-update"
@cached_property
def should_poll(self) -> bool:
"""Return True because we need to poll the latest version."""
return True
@property
def installed_version(self) -> str:
"""Return the installed version of the entity."""
return self.coordinator.data.firmware_version
async def async_update(self) -> None:
"""Update the entity."""
self._attr_latest_version = (
await self.coordinator.client.get_latest_firmware_version(
self.coordinator.serial_number
)
)
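
Because the measurement coordinator does not track the latest firmware version, the entity opts back into polling (should_poll plus the one-hour SCAN_INTERVAL) and fetches it in async_update. Home Assistant then reports an update as available when the two version properties differ; a simplified model of that comparison (the real check is version-aware, plain inequality is an approximation):

    def update_available(installed: str | None, latest: str | None) -> bool:
        # Unknown versions are treated here as "no update"; Home Assistant
        # reports an unknown state in that case instead.
        if installed is None or latest is None:
            return False
        return installed != latest

    print(update_available("3.1.1", "3.1.4"))  # True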

View File

@ -24,5 +24,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/airthings_ble",
"iot_class": "local_polling",
"requirements": ["airthings-ble==0.9.0"]
"requirements": ["airthings-ble==0.9.1"]
}

View File

@ -1,9 +1,11 @@
"""Config flow for AirTouch4."""
from typing import Any
from airtouch4pyapi import AirTouch, AirTouchStatus
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST
from .const import DOMAIN
@ -16,7 +18,9 @@ class AirtouchConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1
async def async_step_user(self, user_input=None):
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a flow initialized by the user."""
if user_input is None:
return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA)

View File

@ -31,7 +31,6 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import (
aiohttp_client,
config_validation as cv,
device_registry as dr,
entity_registry as er,
)
@ -62,8 +61,6 @@ PLATFORMS = [Platform.SENSOR]
DEFAULT_ATTRIBUTION = "Data provided by AirVisual"
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
@callback
def async_get_cloud_api_update_interval(

View File

@ -80,11 +80,9 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN):
"""Initialize."""
self._reauth_entry: ConfigEntry | None = None
async def async_step_import(
self, import_config: dict[str, Any]
) -> ConfigFlowResult:
"""Import a config entry from configuration.yaml."""
return await self.async_step_user(import_config)
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
"""Import a config entry from `airvisual` integration (see #83882)."""
return await self.async_step_user(import_data)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]

View File

@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone",
"iot_class": "local_polling",
"loggers": ["aioairzone"],
"requirements": ["aioairzone==0.8.1"]
"requirements": ["aioairzone==0.8.2"]
}

View File

@ -2,16 +2,21 @@
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any, Final
from aioairzone.common import GrilleAngle, SleepTimeout
from aioairzone.common import GrilleAngle, OperationMode, SleepTimeout
from aioairzone.const import (
API_COLD_ANGLE,
API_HEAT_ANGLE,
API_MODE,
API_SLEEP,
AZD_COLD_ANGLE,
AZD_HEAT_ANGLE,
AZD_MASTER,
AZD_MODE,
AZD_MODES,
AZD_SLEEP,
AZD_ZONES,
)
@ -33,6 +38,9 @@ class AirzoneSelectDescription(SelectEntityDescription):
api_param: str
options_dict: dict[str, int]
options_fn: Callable[[dict[str, Any], dict[str, int]], list[str]] = (
lambda zone_data, value: list(value)
)
GRILLE_ANGLE_DICT: Final[dict[str, int]] = {
@ -42,6 +50,15 @@ GRILLE_ANGLE_DICT: Final[dict[str, int]] = {
"40deg": GrilleAngle.DEG_40,
}
MODE_DICT: Final[dict[str, int]] = {
"cool": OperationMode.COOLING,
"dry": OperationMode.DRY,
"fan": OperationMode.FAN,
"heat": OperationMode.HEATING,
"heat_cool": OperationMode.AUTO,
"stop": OperationMode.STOP,
}
SLEEP_DICT: Final[dict[str, int]] = {
"off": SleepTimeout.SLEEP_OFF,
"30m": SleepTimeout.SLEEP_30,
@ -50,6 +67,26 @@ SLEEP_DICT: Final[dict[str, int]] = {
}
def main_zone_options(
zone_data: dict[str, Any],
options: dict[str, int],
) -> list[str]:
"""Filter available modes."""
modes = zone_data.get(AZD_MODES, [])
return [k for k, v in options.items() if v in modes]
MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
AirzoneSelectDescription(
api_param=API_MODE,
key=AZD_MODE,
options_dict=MODE_DICT,
options_fn=main_zone_options,
translation_key="modes",
),
)
ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
AirzoneSelectDescription(
api_param=API_COLD_ANGLE,
@ -95,7 +132,20 @@ async def async_setup_entry(
received_zones = set(zones_data)
new_zones = received_zones - added_zones
if new_zones:
async_add_entities(
entities: list[AirzoneZoneSelect] = [
AirzoneZoneSelect(
coordinator,
description,
entry,
system_zone_id,
zones_data.get(system_zone_id),
)
for system_zone_id in new_zones
for description in MAIN_ZONE_SELECT_TYPES
if description.key in zones_data.get(system_zone_id)
and zones_data.get(system_zone_id).get(AZD_MASTER) is True
]
entities += [
AirzoneZoneSelect(
coordinator,
description,
@ -106,7 +156,8 @@ async def async_setup_entry(
for system_zone_id in new_zones
for description in ZONE_SELECT_TYPES
if description.key in zones_data.get(system_zone_id)
)
]
async_add_entities(entities)
added_zones.update(new_zones)
entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener))
@ -153,6 +204,11 @@ class AirzoneZoneSelect(AirzoneZoneEntity, AirzoneBaseSelect):
f"{self._attr_unique_id}_{system_zone_id}_{description.key}"
)
self.entity_description = description
self._attr_options = self.entity_description.options_fn(
zone_data, description.options_dict
)
self.values_dict = {v: k for k, v in description.options_dict.items()}
self._async_update_attrs()

View File

@ -52,6 +52,17 @@
"40deg": "[%key:component::airzone::entity::select::grille_angles::state::40deg%]"
}
},
"modes": {
"name": "Mode",
"state": {
"cool": "[%key:component::climate::entity_component::_::state::cool%]",
"dry": "[%key:component::climate::entity_component::_::state::dry%]",
"fan": "[%key:component::climate::entity_component::_::state::fan_only%]",
"heat": "[%key:component::climate::entity_component::_::state::heat%]",
"heat_cool": "[%key:component::climate::entity_component::_::state::heat_cool%]",
"stop": "Stop"
}
},
"sleep_times": {
"name": "Sleep",
"state": {

View File

@ -161,6 +161,11 @@ class AirzoneBinarySensor(AirzoneEntity, BinarySensorEntity):
entity_description: AirzoneBinarySensorEntityDescription
@property
def available(self) -> bool:
"""Return Airzone Cloud binary sensor availability."""
return super().available and self.is_on is not None
@callback
def _handle_coordinator_update(self) -> None:
"""Update attributes when the coordinator updates."""

View File

@ -189,6 +189,11 @@ async def async_setup_entry(
class AirzoneSensor(AirzoneEntity, SensorEntity):
"""Define an Airzone Cloud sensor."""
@property
def available(self) -> bool:
"""Return Airzone Cloud sensor availability."""
return super().available and self.native_value is not None
@callback
def _handle_coordinator_update(self) -> None:
"""Update attributes when the coordinator updates."""

View File

@ -15,12 +15,26 @@
}
},
"services": {
"alarm_arm_away": "mdi:shield-lock",
"alarm_arm_home": "mdi:shield-home",
"alarm_arm_night": "mdi:shield-moon",
"alarm_arm_custom_bypass": "mdi:security",
"alarm_disarm": "mdi:shield-off",
"alarm_trigger": "mdi:bell-ring",
"alarm_arm_vacation": "mdi:shield-airplane"
"alarm_arm_away": {
"service": "mdi:shield-lock"
},
"alarm_arm_home": {
"service": "mdi:shield-home"
},
"alarm_arm_night": {
"service": "mdi:shield-moon"
},
"alarm_arm_custom_bypass": {
"service": "mdi:security"
},
"alarm_disarm": {
"service": "mdi:shield-off"
},
"alarm_trigger": {
"service": "mdi:bell-ring"
},
"alarm_arm_vacation": {
"service": "mdi:shield-airplane"
}
}
}

View File

@ -7,7 +7,11 @@
}
},
"services": {
"alarm_keypress": "mdi:dialpad",
"alarm_toggle_chime": "mdi:abc"
"alarm_keypress": {
"service": "mdi:dialpad"
},
"alarm_toggle_chime": {
"service": "mdi:abc"
}
}
}

View File

@ -124,9 +124,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
if not entities:
return False
component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off")
component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on")
component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle")
component.async_register_entity_service(SERVICE_TURN_OFF, None, "async_turn_off")
component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on")
component.async_register_entity_service(SERVICE_TOGGLE, None, "async_toggle")
await component.async_add_entities(entities)
@ -162,16 +162,8 @@ class Alert(Entity):
self._data = data
self._message_template = message_template
if self._message_template is not None:
self._message_template.hass = hass
self._done_message_template = done_message_template
if self._done_message_template is not None:
self._done_message_template.hass = hass
self._title_template = title_template
if self._title_template is not None:
self._title_template.hass = hass
self._notifiers = notifiers
self._can_ack = can_ack

View File

@ -1,7 +1,13 @@
{
"services": {
"toggle": "mdi:bell-ring",
"turn_off": "mdi:bell-off",
"turn_on": "mdi:bell-alert"
"toggle": {
"service": "mdi:bell-ring"
},
"turn_off": {
"service": "mdi:bell-off"
},
"turn_on": {
"service": "mdi:bell-alert"
}
}
}

View File

@ -52,7 +52,6 @@ class AlexaFlashBriefingView(http.HomeAssistantView):
"""Initialize Alexa view."""
super().__init__()
self.flash_briefings = flash_briefings
template.attach(hass, self.flash_briefings)
@callback
def get(

View File

@ -1206,7 +1206,7 @@ async def async_api_set_mode(
raise AlexaInvalidValueError(msg)
# Remote Activity
if instance == f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}":
elif instance == f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}":
activity = mode.split(".")[1]
activities: list[str] | None = entity.attributes.get(remote.ATTR_ACTIVITY_LIST)
if activity != PRESET_MODE_NA and activities and activity in activities:

View File

@ -283,7 +283,7 @@ class AlexaPresetResource(AlexaCapabilityResource):
"""Implements Alexa PresetResources.
Use presetResources with RangeController to provide a set of
friendlyNamesfor each RangeController preset.
friendlyNames for each RangeController preset.
https://developer.amazon.com/docs/device-apis/resources-and-assets.html#presetresources
"""

View File

@ -194,7 +194,7 @@ async def async_handle_message(
try:
if not enabled:
raise AlexaBridgeUnreachableError(
raise AlexaBridgeUnreachableError( # noqa: TRY301
"Alexa API not enabled in Home Assistant configuration"
)

View File

@ -8,128 +8,23 @@ CONF_REGION: Final = "region_name"
CONF_ACCESS_KEY_ID: Final = "aws_access_key_id"
CONF_SECRET_ACCESS_KEY: Final = "aws_secret_access_key"
DEFAULT_REGION: Final = "us-east-1"
SUPPORTED_REGIONS: Final[list[str]] = [
"us-east-1",
"us-east-2",
"us-west-1",
"us-west-2",
"ca-central-1",
"eu-west-1",
"eu-central-1",
"eu-west-2",
"eu-west-3",
"ap-southeast-1",
"ap-southeast-2",
"ap-northeast-2",
"ap-northeast-1",
"ap-south-1",
"sa-east-1",
]
CONF_ENGINE: Final = "engine"
CONF_VOICE: Final = "voice"
CONF_OUTPUT_FORMAT: Final = "output_format"
CONF_SAMPLE_RATE: Final = "sample_rate"
CONF_TEXT_TYPE: Final = "text_type"
SUPPORTED_VOICES: Final[list[str]] = [
"Aditi", # Hindi
"Amy", # English (British)
"Aria", # English (New Zealand), Neural
"Arlet", # Catalan, Neural
"Arthur", # English, Neural
"Astrid", # Swedish
"Ayanda", # English (South African), Neural
"Bianca", # Italian
"Brian", # English (British)
"Camila", # Portuguese, Brazilian
"Carla", # Italian
"Carmen", # Romanian
"Celine", # French
"Chantal", # French Canadian
"Conchita", # Spanish (European)
"Cristiano", # Portuguese (European)
"Daniel", # German, Neural
"Dora", # Icelandic
"Elin", # Swedish, Neural
"Emma", # English
"Enrique", # Spanish (European)
"Ewa", # Polish
"Filiz", # Turkish
"Gabrielle", # French (Canadian)
"Geraint", # English Welsh
"Giorgio", # Italian
"Gwyneth", # Welsh
"Hala", # Arabic (Gulf), Neural
"Hannah", # German (Austrian), Neural
"Hans", # German
"Hiujin", # Chinese (Cantonese), Neural
"Ida", # Norwegian, Neural
"Ines", # Portuguese, European # codespell:ignore ines
"Ivy", # English
"Jacek", # Polish
"Jan", # Polish
"Joanna", # English
"Joey", # English
"Justin", # English
"Kajal", # English (Indian)/Hindi (Bilingual ), Neural
"Karl", # Icelandic
"Kendra", # English
"Kevin", # English, Neural
"Kimberly", # English
"Laura", # Dutch, Neural
"Lea", # French
"Liam", # Canadian French, Neural
"Liv", # Norwegian
"Lotte", # Dutch
"Lucia", # Spanish European
"Lupe", # Spanish US
"Mads", # Danish
"Maja", # Polish
"Marlene", # German
"Mathieu", # French
"Matthew", # English
"Maxim", # Russian
"Mia", # Spanish Mexican
"Miguel", # Spanish US
"Mizuki", # Japanese
"Naja", # Danish
"Nicole", # English Australian
"Ola", # Polish, Neural
"Olivia", # Female, Australian, Neural
"Penelope", # Spanish US
"Pedro", # Spanish US, Neural
"Raveena", # English, Indian
"Ricardo", # Portuguese (Brazilian)
"Ruben", # Dutch
"Russell", # English (Australian)
"Ruth", # English, Neural
"Salli", # English
"Seoyeon", # Korean
"Stephen", # English, Neural
"Suvi", # Finnish
"Takumi", # Japanese
"Tatyana", # Russian
"Vicki", # German
"Vitoria", # Portuguese, Brazilian
"Zeina", # Arabic
"Zhiyu", # Chinese
]
SUPPORTED_OUTPUT_FORMATS: Final[set[str]] = {"mp3", "ogg_vorbis", "pcm"}
SUPPORTED_OUTPUT_FORMATS: Final[list[str]] = ["mp3", "ogg_vorbis", "pcm"]
SUPPORTED_SAMPLE_RATES: Final[set[str]] = {"8000", "16000", "22050", "24000"}
SUPPORTED_ENGINES: Final[list[str]] = ["neural", "standard"]
SUPPORTED_SAMPLE_RATES: Final[list[str]] = ["8000", "16000", "22050", "24000"]
SUPPORTED_SAMPLE_RATES_MAP: Final[dict[str, list[str]]] = {
"mp3": ["8000", "16000", "22050", "24000"],
"ogg_vorbis": ["8000", "16000", "22050"],
"pcm": ["8000", "16000"],
SUPPORTED_SAMPLE_RATES_MAP: Final[dict[str, set[str]]] = {
"mp3": {"8000", "16000", "22050", "24000"},
"ogg_vorbis": {"8000", "16000", "22050"},
"pcm": {"8000", "16000"},
}
SUPPORTED_TEXT_TYPES: Final[list[str]] = ["text", "ssml"]
SUPPORTED_TEXT_TYPES: Final[set[str]] = {"text", "ssml"}
CONTENT_TYPE_EXTENSIONS: Final[dict[str, str]] = {
"audio/mpeg": "mp3",
@ -137,6 +32,8 @@ CONTENT_TYPE_EXTENSIONS: Final[dict[str, str]] = {
"audio/pcm": "pcm",
}
DEFAULT_REGION: Final = "us-east-1"
DEFAULT_ENGINE: Final = "standard"
DEFAULT_VOICE: Final = "Joanna"
DEFAULT_OUTPUT_FORMAT: Final = "mp3"

View File

@ -16,6 +16,11 @@ from homeassistant.components.tts import (
)
from homeassistant.const import ATTR_CREDENTIALS, CONF_PROFILE_NAME
from homeassistant.core import HomeAssistant
from homeassistant.generated.amazon_polly import (
SUPPORTED_ENGINES,
SUPPORTED_REGIONS,
SUPPORTED_VOICES,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
@ -38,13 +43,10 @@ from .const import (
DEFAULT_SAMPLE_RATES,
DEFAULT_TEXT_TYPE,
DEFAULT_VOICE,
SUPPORTED_ENGINES,
SUPPORTED_OUTPUT_FORMATS,
SUPPORTED_REGIONS,
SUPPORTED_SAMPLE_RATES,
SUPPORTED_SAMPLE_RATES_MAP,
SUPPORTED_TEXT_TYPES,
SUPPORTED_VOICES,
)
_LOGGER: Final = logging.getLogger(__name__)

View File

@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["aioambient"],
"requirements": ["aioambient==2024.01.0"]
"requirements": ["aioambient==2024.08.0"]
}

View File

@ -17,7 +17,6 @@ from homeassistant.const import (
)
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.dispatcher import async_dispatcher_send
import homeassistant.helpers.entity_registry as er
@ -25,7 +24,6 @@ import homeassistant.helpers.entity_registry as er
from .const import (
ATTR_LAST_DATA,
CONF_APP_KEY,
DOMAIN,
LOGGER,
TYPE_SOLARRADIATION,
TYPE_SOLARRADIATION_LX,
@ -37,7 +35,6 @@ DATA_CONFIG = "config"
DEFAULT_SOCKET_MIN_RETRY = 15
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
type AmbientStationConfigEntry = ConfigEntry[AmbientStation]

View File

@ -2,6 +2,8 @@
from __future__ import annotations
from typing import Any
from aioambient import API
from aioambient.errors import AmbientError
import voluptuous as vol
@ -32,7 +34,9 @@ class AmbientStationFlowHandler(ConfigFlow, domain=DOMAIN):
errors=errors if errors else {},
)
async def async_step_user(self, user_input: dict | None = None) -> ConfigFlowResult:
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the start of the config flow."""
if not user_input:
return await self._show_form()

View File

@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["aioambient"],
"requirements": ["aioambient==2024.01.0"]
"requirements": ["aioambient==2024.08.0"]
}

View File

@ -499,7 +499,7 @@ class AmcrestCam(Camera):
await getattr(self, f"_async_set_{func}")(value)
new_value = await getattr(self, f"_async_get_{func}")()
if new_value != value:
raise AmcrestCommandFailed
raise AmcrestCommandFailed # noqa: TRY301
except (AmcrestError, AmcrestCommandFailed) as error:
if tries == 1:
log_update_error(_LOGGER, action, self.name, description, error)
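
The # noqa keeps the inline raise and silences ruff's TRY301 rule ("raise within try"), which would otherwise suggest abstracting the raise into a helper called from the try block. A generic, self-contained sketch of that alternative refactor (illustrative names only, not what this commit does):

    class CommandFailed(Exception):
        """Stand-in for AmcrestCommandFailed."""

    def _verify_applied(new_value: int, expected: int) -> None:
        if new_value != expected:
            raise CommandFailed

    def apply_setting(new_value: int, expected: int) -> None:
        try:
            _verify_applied(new_value, expected)
        except CommandFailed:
            print("setting was not applied")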

View File

@ -1,15 +1,37 @@
{
"services": {
"enable_recording": "mdi:record-rec",
"disable_recording": "mdi:stop",
"enable_audio": "mdi:volume-high",
"disable_audio": "mdi:volume-off",
"enable_motion_recording": "mdi:motion-sensor",
"disable_motion_recording": "mdi:motion-sensor-off",
"goto_preset": "mdi:pan",
"set_color_bw": "mdi:palette",
"start_tour": "mdi:panorama",
"stop_tour": "mdi:panorama-outline",
"ptz_control": "mdi:pan"
"enable_recording": {
"service": "mdi:record-rec"
},
"disable_recording": {
"service": "mdi:stop"
},
"enable_audio": {
"service": "mdi:volume-high"
},
"disable_audio": {
"service": "mdi:volume-off"
},
"enable_motion_recording": {
"service": "mdi:motion-sensor"
},
"disable_motion_recording": {
"service": "mdi:motion-sensor-off"
},
"goto_preset": {
"service": "mdi:pan"
},
"set_color_bw": {
"service": "mdi:palette"
},
"start_tour": {
"service": "mdi:panorama"
},
"stop_tour": {
"service": "mdi:panorama-outline"
},
"ptz_control": {
"service": "mdi:pan"
}
}
}

View File

@ -14,7 +14,6 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN
from .coordinator import AndroidIPCamDataUpdateCoordinator
@ -27,9 +26,6 @@ PLATFORMS: list[Platform] = [
]
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Android IP Webcam from a config entry."""
websession = async_get_clientsession(hass)

View File

@ -1,8 +1,16 @@
{
"services": {
"adb_command": "mdi:console",
"download": "mdi:download",
"upload": "mdi:upload",
"learn_sendevent": "mdi:remote"
"adb_command": {
"service": "mdi:console"
},
"download": {
"service": "mdi:download"
},
"upload": {
"service": "mdi:upload"
},
"learn_sendevent": {
"service": "mdi:remote"
}
}
}

View File

@ -87,7 +87,7 @@ async def async_setup_entry(
"adb_command",
)
platform.async_register_entity_service(
SERVICE_LEARN_SENDEVENT, {}, "learn_sendevent"
SERVICE_LEARN_SENDEVENT, None, "learn_sendevent"
)
platform.async_register_entity_service(
SERVICE_DOWNLOAD,

View File

@ -8,6 +8,6 @@
"iot_class": "local_push",
"loggers": ["androidtvremote2"],
"quality_scale": "platinum",
"requirements": ["androidtvremote2==0.1.1"],
"requirements": ["androidtvremote2==0.1.2"],
"zeroconf": ["_androidtvremote2._tcp.local."]
}

View File

@ -0,0 +1,46 @@
"""The Anthropic integration."""
from __future__ import annotations
import anthropic
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from .const import DOMAIN, LOGGER
PLATFORMS = (Platform.CONVERSATION,)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]
async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
"""Set up Anthropic from a config entry."""
client = anthropic.AsyncAnthropic(api_key=entry.data[CONF_API_KEY])
try:
await client.messages.create(
model="claude-3-haiku-20240307",
max_tokens=1,
messages=[{"role": "user", "content": "Hi"}],
timeout=10.0,
)
except anthropic.AuthenticationError as err:
LOGGER.error("Invalid API key: %s", err)
return False
except anthropic.AnthropicError as err:
raise ConfigEntryNotReady(err) from err
entry.runtime_data = client
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload Anthropic."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@ -0,0 +1,210 @@
"""Config flow for Anthropic integration."""
from __future__ import annotations
import logging
from types import MappingProxyType
from typing import Any
import anthropic
import voluptuous as vol
from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
)
from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API
from homeassistant.core import HomeAssistant
from homeassistant.helpers import llm
from homeassistant.helpers.selector import (
NumberSelector,
NumberSelectorConfig,
SelectOptionDict,
SelectSelector,
SelectSelectorConfig,
TemplateSelector,
)
from .const import (
CONF_CHAT_MODEL,
CONF_MAX_TOKENS,
CONF_PROMPT,
CONF_RECOMMENDED,
CONF_TEMPERATURE,
DOMAIN,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
)
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_API_KEY): str,
}
)
RECOMMENDED_OPTIONS = {
CONF_RECOMMENDED: True,
CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
}
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
"""Validate the user input allows us to connect.
Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
"""
client = anthropic.AsyncAnthropic(api_key=data[CONF_API_KEY])
await client.messages.create(
model="claude-3-haiku-20240307",
max_tokens=1,
messages=[{"role": "user", "content": "Hi"}],
timeout=10.0,
)
class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Anthropic."""
VERSION = 1
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors = {}
if user_input is not None:
try:
await validate_input(self.hass, user_input)
except anthropic.APITimeoutError:
errors["base"] = "timeout_connect"
except anthropic.APIConnectionError:
errors["base"] = "cannot_connect"
except anthropic.APIStatusError as e:
if isinstance(e.body, dict):
errors["base"] = e.body.get("error", {}).get("type", "unknown")
else:
errors["base"] = "unknown"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(
title="Claude",
data=user_input,
options=RECOMMENDED_OPTIONS,
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors or None
)
@staticmethod
def async_get_options_flow(
config_entry: ConfigEntry,
) -> OptionsFlow:
"""Create the options flow."""
return AnthropicOptionsFlow(config_entry)
class AnthropicOptionsFlow(OptionsFlow):
"""Anthropic config flow options handler."""
def __init__(self, config_entry: ConfigEntry) -> None:
"""Initialize options flow."""
self.config_entry = config_entry
self.last_rendered_recommended = config_entry.options.get(
CONF_RECOMMENDED, False
)
async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Manage the options."""
options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options
if user_input is not None:
if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
if user_input[CONF_LLM_HASS_API] == "none":
user_input.pop(CONF_LLM_HASS_API)
return self.async_create_entry(title="", data=user_input)
# Re-render the options again, now with the recommended options shown/hidden
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
options = {
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
CONF_PROMPT: user_input[CONF_PROMPT],
CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
}
suggested_values = options.copy()
if not suggested_values.get(CONF_PROMPT):
suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT
schema = self.add_suggested_values_to_schema(
vol.Schema(anthropic_config_option_schema(self.hass, options)),
suggested_values,
)
return self.async_show_form(
step_id="init",
data_schema=schema,
)
def anthropic_config_option_schema(
hass: HomeAssistant,
options: dict[str, Any] | MappingProxyType[str, Any],
) -> dict:
"""Return a schema for Anthropic completion options."""
hass_apis: list[SelectOptionDict] = [
SelectOptionDict(
label="No control",
value="none",
)
]
hass_apis.extend(
SelectOptionDict(
label=api.name,
value=api.id,
)
for api in llm.async_get_apis(hass)
)
schema = {
vol.Optional(CONF_PROMPT): TemplateSelector(),
vol.Optional(CONF_LLM_HASS_API, default="none"): SelectSelector(
SelectSelectorConfig(options=hass_apis)
),
vol.Required(
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
): bool,
}
if options.get(CONF_RECOMMENDED):
return schema
schema.update(
{
vol.Optional(
CONF_CHAT_MODEL,
default=RECOMMENDED_CHAT_MODEL,
): str,
vol.Optional(
CONF_MAX_TOKENS,
default=RECOMMENDED_MAX_TOKENS,
): int,
vol.Optional(
CONF_TEMPERATURE,
default=RECOMMENDED_TEMPERATURE,
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
}
)
return schema

View File

@ -0,0 +1,15 @@
"""Constants for the Anthropic integration."""
import logging
DOMAIN = "anthropic"
LOGGER = logging.getLogger(__package__)
CONF_RECOMMENDED = "recommended"
CONF_PROMPT = "prompt"
CONF_CHAT_MODEL = "chat_model"
RECOMMENDED_CHAT_MODEL = "claude-3-haiku-20240307"
CONF_MAX_TOKENS = "max_tokens"
RECOMMENDED_MAX_TOKENS = 1024
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0

View File

@ -0,0 +1,316 @@
"""Conversation support for Anthropic."""
from collections.abc import Callable
import json
from typing import Any, Literal, cast
import anthropic
from anthropic._types import NOT_GIVEN
from anthropic.types import (
Message,
MessageParam,
TextBlock,
TextBlockParam,
ToolParam,
ToolResultBlockParam,
ToolUseBlock,
ToolUseBlockParam,
)
import voluptuous as vol
from voluptuous_openapi import convert
from homeassistant.components import conversation
from homeassistant.components.conversation import trace
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.helpers import device_registry as dr, intent, llm, template
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import ulid
from . import AnthropicConfigEntry
from .const import (
CONF_CHAT_MODEL,
CONF_MAX_TOKENS,
CONF_PROMPT,
CONF_TEMPERATURE,
DOMAIN,
LOGGER,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
)
# Max number of back and forth with the LLM to generate a response
MAX_TOOL_ITERATIONS = 10
async def async_setup_entry(
hass: HomeAssistant,
config_entry: AnthropicConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up conversation entities."""
agent = AnthropicConversationEntity(config_entry)
async_add_entities([agent])
def _format_tool(
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
) -> ToolParam:
"""Format tool specification."""
return ToolParam(
name=tool.name,
description=tool.description or "",
input_schema=convert(tool.parameters, custom_serializer=custom_serializer),
)
def _message_convert(
message: Message,
) -> MessageParam:
"""Convert from class to TypedDict."""
param_content: list[TextBlockParam | ToolUseBlockParam] = []
for message_content in message.content:
if isinstance(message_content, TextBlock):
param_content.append(TextBlockParam(type="text", text=message_content.text))
elif isinstance(message_content, ToolUseBlock):
param_content.append(
ToolUseBlockParam(
type="tool_use",
id=message_content.id,
name=message_content.name,
input=message_content.input,
)
)
return MessageParam(role=message.role, content=param_content)
class AnthropicConversationEntity(
conversation.ConversationEntity, conversation.AbstractConversationAgent
):
"""Anthropic conversation agent."""
_attr_has_entity_name = True
_attr_name = None
def __init__(self, entry: AnthropicConfigEntry) -> None:
"""Initialize the agent."""
self.entry = entry
self.history: dict[str, list[MessageParam]] = {}
self._attr_unique_id = entry.entry_id
self._attr_device_info = dr.DeviceInfo(
identifiers={(DOMAIN, entry.entry_id)},
manufacturer="Anthropic",
model="Claude",
entry_type=dr.DeviceEntryType.SERVICE,
)
if self.entry.options.get(CONF_LLM_HASS_API):
self._attr_supported_features = (
conversation.ConversationEntityFeature.CONTROL
)
@property
def supported_languages(self) -> list[str] | Literal["*"]:
"""Return a list of supported languages."""
return MATCH_ALL
async def async_added_to_hass(self) -> None:
"""When entity is added to Home Assistant."""
await super().async_added_to_hass()
self.entry.async_on_unload(
self.entry.add_update_listener(self._async_entry_update_listener)
)
async def async_process(
self, user_input: conversation.ConversationInput
) -> conversation.ConversationResult:
"""Process a sentence."""
options = self.entry.options
intent_response = intent.IntentResponse(language=user_input.language)
llm_api: llm.APIInstance | None = None
tools: list[ToolParam] | None = None
user_name: str | None = None
llm_context = llm.LLMContext(
platform=DOMAIN,
context=user_input.context,
user_prompt=user_input.text,
language=user_input.language,
assistant=conversation.DOMAIN,
device_id=user_input.device_id,
)
if options.get(CONF_LLM_HASS_API):
try:
llm_api = await llm.async_get_api(
self.hass,
options[CONF_LLM_HASS_API],
llm_context,
)
except HomeAssistantError as err:
LOGGER.error("Error getting LLM API: %s", err)
intent_response.async_set_error(
intent.IntentResponseErrorCode.UNKNOWN,
f"Error preparing LLM API: {err}",
)
return conversation.ConversationResult(
response=intent_response, conversation_id=user_input.conversation_id
)
tools = [
_format_tool(tool, llm_api.custom_serializer) for tool in llm_api.tools
]
if user_input.conversation_id is None:
conversation_id = ulid.ulid_now()
messages = []
elif user_input.conversation_id in self.history:
conversation_id = user_input.conversation_id
messages = self.history[conversation_id]
else:
# Conversation IDs are ULIDs. We generate a new one if not provided.
# If an old ULID is passed in, we will generate a new one to indicate
# a new conversation was started. If the user picks their own, they
# want to track a conversation and we respect it.
try:
ulid.ulid_to_bytes(user_input.conversation_id)
conversation_id = ulid.ulid_now()
except ValueError:
conversation_id = user_input.conversation_id
messages = []
if (
user_input.context
and user_input.context.user_id
and (
user := await self.hass.auth.async_get_user(user_input.context.user_id)
)
):
user_name = user.name
try:
prompt_parts = [
template.Template(
llm.BASE_PROMPT
+ options.get(CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT),
self.hass,
).async_render(
{
"ha_name": self.hass.config.location_name,
"user_name": user_name,
"llm_context": llm_context,
},
parse_result=False,
)
]
except TemplateError as err:
LOGGER.error("Error rendering prompt: %s", err)
intent_response.async_set_error(
intent.IntentResponseErrorCode.UNKNOWN,
f"Sorry, I had a problem with my template: {err}",
)
return conversation.ConversationResult(
response=intent_response, conversation_id=conversation_id
)
if llm_api:
prompt_parts.append(llm_api.api_prompt)
prompt = "\n".join(prompt_parts)
# Create a copy of the variable because we attach it to the trace
messages = [*messages, MessageParam(role="user", content=user_input.text)]
LOGGER.debug("Prompt: %s", messages)
LOGGER.debug("Tools: %s", tools)
trace.async_conversation_trace_append(
trace.ConversationTraceEventType.AGENT_DETAIL,
{"system": prompt, "messages": messages},
)
client = self.entry.runtime_data
# To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS):
try:
response = await client.messages.create(
model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
messages=messages,
tools=tools or NOT_GIVEN,
max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
system=prompt,
temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
)
except anthropic.AnthropicError as err:
intent_response.async_set_error(
intent.IntentResponseErrorCode.UNKNOWN,
f"Sorry, I had a problem talking to Anthropic: {err}",
)
return conversation.ConversationResult(
response=intent_response, conversation_id=conversation_id
)
LOGGER.debug("Response %s", response)
messages.append(_message_convert(response))
if response.stop_reason != "tool_use" or not llm_api:
break
tool_results: list[ToolResultBlockParam] = []
for tool_call in response.content:
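# Any plain text blocks Claude returns alongside its tool calls are only logged here.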
if isinstance(tool_call, TextBlock):
LOGGER.info(tool_call.text)
if not isinstance(tool_call, ToolUseBlock):
continue
tool_input = llm.ToolInput(
tool_name=tool_call.name,
tool_args=cast(dict[str, Any], tool_call.input),
)
LOGGER.debug(
"Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args
)
try:
tool_response = await llm_api.async_call_tool(tool_input)
except (HomeAssistantError, vol.Invalid) as e:
tool_response = {"error": type(e).__name__}
if str(e):
tool_response["error_text"] = str(e)
LOGGER.debug("Tool response: %s", tool_response)
tool_results.append(
ToolResultBlockParam(
type="tool_result",
tool_use_id=tool_call.id,
content=json.dumps(tool_response),
)
)
messages.append(MessageParam(role="user", content=tool_results))
self.history[conversation_id] = messages
for content in response.content:
if isinstance(content, TextBlock):
intent_response.async_set_speech(content.text)
break
return conversation.ConversationResult(
response=intent_response, conversation_id=conversation_id
)
async def _async_entry_update_listener(
self, hass: HomeAssistant, entry: ConfigEntry
) -> None:
"""Handle options update."""
# Reload as we update device info + entity name + supported features
await hass.config_entries.async_reload(entry.entry_id)
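For reference, a hedged sketch of the TypedDict that _format_tool builds for a single tool; the tool name and parameter below are invented for illustration, and the exact input_schema shape depends on voluptuous_openapi.convert:

# Hypothetical ToolParam as produced by _format_tool (names invented):
example_tool_param = {
    "name": "HassTurnOn",                           # tool.name
    "description": "Turns on a device or entity",   # tool.description or ""
    "input_schema": {                               # convert(tool.parameters, ...)
        "type": "object",
        "properties": {"name": {"type": "string"}},
    },
}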

View File

@ -0,0 +1,12 @@
{
"domain": "anthropic",
"name": "Anthropic Conversation",
"after_dependencies": ["assist_pipeline", "intent"],
"codeowners": ["@Shulyaka"],
"config_flow": true,
"dependencies": ["conversation"],
"documentation": "https://www.home-assistant.io/integrations/anthropic",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["anthropic==0.31.2"]
}

View File

@ -0,0 +1,34 @@
{
"config": {
"step": {
"user": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
}
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
"authentication_error": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
}
},
"options": {
"step": {
"init": {
"data": {
"prompt": "Instructions",
"chat_model": "[%key:common::generic::model%]",
"max_tokens": "Maximum tokens to return in response",
"temperature": "Temperature",
"llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
"recommended": "Recommended model settings"
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template."
}
}
}
}
}

View File

@ -8,7 +8,6 @@ from typing import Final
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PORT, Platform
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN
from .coordinator import APCUPSdCoordinator
@ -17,8 +16,6 @@ _LOGGER = logging.getLogger(__name__)
PLATFORMS: Final = (Platform.BINARY_SENSOR, Platform.SENSOR)
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Use config values to set up a function enabling status retrieval."""

View File

@ -6,4 +6,4 @@ DOMAIN: Final = "apcupsd"
CONNECTION_TIMEOUT: int = 10
# Field name of last self test retrieved from apcupsd.
LASTSTEST: Final = "laststest"
LAST_S_TEST: Final = "laststest"

View File

@ -13,7 +13,6 @@ from homeassistant.components.sensor import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
PERCENTAGE,
STATE_UNKNOWN,
UnitOfApparentPower,
UnitOfElectricCurrent,
UnitOfElectricPotential,
@ -26,7 +25,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, LASTSTEST
from .const import DOMAIN, LAST_S_TEST
from .coordinator import APCUPSdCoordinator
PARALLEL_UPDATES = 0
@ -157,8 +156,8 @@ SENSORS: dict[str, SensorEntityDescription] = {
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
LASTSTEST: SensorEntityDescription(
key=LASTSTEST,
LAST_S_TEST: SensorEntityDescription(
key=LAST_S_TEST,
translation_key="last_self_test",
),
"lastxfer": SensorEntityDescription(
@ -423,7 +422,7 @@ async def async_setup_entry(
# periodical (or manual) self test since last daemon restart. It might not be available
# when we set up the integration, and we do not know if it would ever be available. Here we
# add it anyway and mark it as unknown initially.
for resource in available_resources | {LASTSTEST}:
for resource in available_resources | {LAST_S_TEST}:
if resource not in SENSORS:
_LOGGER.warning("Invalid resource from APCUPSd: %s", resource.upper())
continue
@ -484,7 +483,7 @@ class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity):
# performed) and may disappear again after certain events. So we mark the state as "unknown"
# when it becomes unknown after such events.
if key not in self.coordinator.data:
self._attr_native_value = STATE_UNKNOWN
self._attr_native_value = None
return
self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key])

View File

@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/apple_tv",
"iot_class": "local_push",
"loggers": ["pyatv", "srptools"],
"requirements": ["pyatv==0.14.3"],
"requirements": ["pyatv==0.15.0"],
"zeroconf": [
"_mediaremotetv._tcp.local.",
"_companion-link._tcp.local.",

View File

@ -13,7 +13,12 @@ from homeassistant.core import HomeAssistant
from .const import DEFAULT_PORT
from .coordinator import ApSystemsDataCoordinator
PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR, Platform.SWITCH]
PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.NUMBER,
Platform.SENSOR,
Platform.SWITCH,
]
@dataclass

View File

@ -0,0 +1,102 @@
"""The read-only binary sensors for APsystems local API integration."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from APsystemsEZ1 import ReturnAlarmInfo
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import ApSystemsConfigEntry, ApSystemsData
from .coordinator import ApSystemsDataCoordinator
from .entity import ApSystemsEntity
@dataclass(frozen=True, kw_only=True)
class ApsystemsLocalApiBinarySensorDescription(BinarySensorEntityDescription):
"""Describes Apsystens Inverter binary sensor entity."""
is_on: Callable[[ReturnAlarmInfo], bool | None]
BINARY_SENSORS: tuple[ApsystemsLocalApiBinarySensorDescription, ...] = (
ApsystemsLocalApiBinarySensorDescription(
key="off_grid_status",
translation_key="off_grid_status",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
is_on=lambda c: c.offgrid,
),
ApsystemsLocalApiBinarySensorDescription(
key="dc_1_short_circuit_error_status",
translation_key="dc_1_short_circuit_error_status",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
is_on=lambda c: c.shortcircuit_1,
),
ApsystemsLocalApiBinarySensorDescription(
key="dc_2_short_circuit_error_status",
translation_key="dc_2_short_circuit_error_status",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
is_on=lambda c: c.shortcircuit_2,
),
ApsystemsLocalApiBinarySensorDescription(
key="output_fault_status",
translation_key="output_fault_status",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
is_on=lambda c: not c.operating,
),
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ApSystemsConfigEntry,
add_entities: AddEntitiesCallback,
) -> None:
"""Set up the binary sensor platform."""
config = config_entry.runtime_data
add_entities(
ApSystemsBinarySensorWithDescription(
data=config,
entity_description=desc,
)
for desc in BINARY_SENSORS
)
class ApSystemsBinarySensorWithDescription(
CoordinatorEntity[ApSystemsDataCoordinator], ApSystemsEntity, BinarySensorEntity
):
"""Base binary sensor to be used with description."""
entity_description: ApsystemsLocalApiBinarySensorDescription
def __init__(
self,
data: ApSystemsData,
entity_description: ApsystemsLocalApiBinarySensorDescription,
) -> None:
"""Initialize the sensor."""
super().__init__(data.coordinator)
ApSystemsEntity.__init__(self, data)
self.entity_description = entity_description
self._attr_unique_id = f"{data.device_id}_{entity_description.key}"
@property
def is_on(self) -> bool | None:
"""Return value of sensor."""
return self.entity_description.is_on(self.coordinator.data.alarm_info)

View File

@ -2,17 +2,26 @@
from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta
from APsystemsEZ1 import APsystemsEZ1M, ReturnOutputData
from APsystemsEZ1 import APsystemsEZ1M, ReturnAlarmInfo, ReturnOutputData
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import LOGGER
class ApSystemsDataCoordinator(DataUpdateCoordinator[ReturnOutputData]):
@dataclass
class ApSystemsSensorData:
"""Representing different Apsystems sensor data."""
output_data: ReturnOutputData
alarm_info: ReturnAlarmInfo
class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
"""Coordinator used for all sensors."""
def __init__(self, hass: HomeAssistant, api: APsystemsEZ1M) -> None:
@ -25,5 +34,14 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ReturnOutputData]):
)
self.api = api
async def _async_update_data(self) -> ReturnOutputData:
return await self.api.get_output_data()
async def _async_setup(self) -> None:
try:
max_power = (await self.api.get_device_info()).maxPower
except (ConnectionError, TimeoutError):
raise UpdateFailed from None
self.api.max_power = max_power
async def _async_update_data(self) -> ApSystemsSensorData:
output_data = await self.api.get_output_data()
alarm_info = await self.api.get_alarm_info()
return ApSystemsSensorData(output_data=output_data, alarm_info=alarm_info)
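A hedged illustration of how platforms consume the coordinator's new payload after this change (mirroring the sensor and binary_sensor updates elsewhere in this diff):

# Illustrative access pattern only:
data = coordinator.data       # ApSystemsSensorData
alarm = data.alarm_info       # ReturnAlarmInfo, e.g. alarm.offgrid for the new binary sensors
output = data.output_data     # ReturnOutputData, still used by the sensor platform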

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/apsystems",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["apsystems-ez1==1.3.3"]
"requirements": ["apsystems-ez1==2.2.1"]
}

View File

@ -26,7 +26,6 @@ async def async_setup_entry(
class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity):
"""Base sensor to be used with description."""
_attr_native_max_value = 800
_attr_native_min_value = 30
_attr_native_step = 1
_attr_device_class = NumberDeviceClass.POWER
@ -42,6 +41,7 @@ class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity):
super().__init__(data)
self._api = data.coordinator.api
self._attr_unique_id = f"{data.device_id}_output_limit"
self._attr_native_max_value = data.coordinator.api.max_power
async def async_update(self) -> None:
"""Set the state with the value fetched from the inverter."""

View File

@ -148,4 +148,4 @@ class ApSystemsSensorWithDescription(
@property
def native_value(self) -> StateType:
"""Return value of sensor."""
return self.entity_description.value_fn(self.coordinator.data)
return self.entity_description.value_fn(self.coordinator.data.output_data)

View File

@ -19,6 +19,20 @@
}
},
"entity": {
"binary_sensor": {
"off_grid_status": {
"name": "Off grid status"
},
"dc_1_short_circuit_error_status": {
"name": "DC 1 short circuit error status"
},
"dc_2_short_circuit_error_status": {
"name": "DC 2 short circuit error status"
},
"output_fault_status": {
"name": "Output fault status"
}
},
"sensor": {
"total_power": {
"name": "Total power"

View File

@ -5,7 +5,6 @@ from __future__ import annotations
from typing import Any
from aiohttp.client_exceptions import ClientConnectionError
from APsystemsEZ1 import Status
from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
from homeassistant.core import HomeAssistant
@ -45,12 +44,12 @@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity):
self._attr_available = False
else:
self._attr_available = True
self._attr_is_on = status == Status.normal
self._attr_is_on = status
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the switch on."""
await self._api.set_device_power_status(0)
await self._api.set_device_power_status(True)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the switch off."""
await self._api.set_device_power_status(1)
await self._api.set_device_power_status(False)

View File

@ -3,12 +3,14 @@
from __future__ import annotations
from aioaquacell import AquacellApi
from aioaquacell.const import Brand
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONF_BRAND
from .coordinator import AquacellCoordinator
PLATFORMS = [Platform.SENSOR]
@ -20,7 +22,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AquacellConfigEntry) ->
"""Set up Aquacell from a config entry."""
session = async_get_clientsession(hass)
aquacell_api = AquacellApi(session)
brand = entry.data.get(CONF_BRAND, Brand.AQUACELL)
aquacell_api = AquacellApi(session, brand)
coordinator = AquacellCoordinator(hass, aquacell_api)

View File

@ -7,18 +7,27 @@ import logging
from typing import Any
from aioaquacell import ApiException, AquacellApi, AuthenticationFailed
from aioaquacell.const import SUPPORTED_BRANDS, Brand
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONF_REFRESH_TOKEN, CONF_REFRESH_TOKEN_CREATION_TIME, DOMAIN
from .const import (
CONF_BRAND,
CONF_REFRESH_TOKEN,
CONF_REFRESH_TOKEN_CREATION_TIME,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_BRAND, default=Brand.AQUACELL): vol.In(
{key: brand.name for key, brand in SUPPORTED_BRANDS.items()}
),
vol.Required(CONF_EMAIL): str,
vol.Required(CONF_PASSWORD): str,
}
@ -33,7 +42,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN):
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
"""Handle the cloud logon step."""
errors: dict[str, str] = {}
if user_input is not None:
await self.async_set_unique_id(
@ -42,7 +51,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN):
self._abort_if_unique_id_configured()
session = async_get_clientsession(self.hass)
api = AquacellApi(session)
api = AquacellApi(session, user_input[CONF_BRAND])
try:
refresh_token = await api.authenticate(
user_input[CONF_EMAIL], user_input[CONF_PASSWORD]
@ -59,6 +68,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN):
title=user_input[CONF_EMAIL],
data={
**user_input,
CONF_BRAND: user_input[CONF_BRAND],
CONF_REFRESH_TOKEN: refresh_token,
CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(),
},

View File

@ -5,6 +5,7 @@ from datetime import timedelta
DOMAIN = "aquacell"
DATA_AQUACELL = "DATA_AQUACELL"
CONF_BRAND = "brand"
CONF_REFRESH_TOKEN = "refresh_token"
CONF_REFRESH_TOKEN_CREATION_TIME = "refresh_token_creation_time"

View File

@ -1,6 +1,6 @@
{
"domain": "aquacell",
"name": "Aquacell",
"name": "AquaCell",
"codeowners": ["@Jordi1990"],
"config_flow": true,
"dependencies": ["http", "network"],

View File

@ -2,8 +2,9 @@
"config": {
"step": {
"user": {
"description": "Fill in your Aquacell mobile app credentials",
"description": "Select the brand of the softener and fill in your softener mobile app credentials",
"data": {
"brand": "Brand",
"email": "[%key:common::config_flow::data::email%]",
"password": "[%key:common::config_flow::data::password%]"
}

View File

@ -6,6 +6,9 @@
},
"radiation_rate": {
"default": "mdi:radioactive"
},
"radon_concentration": {
"default": "mdi:radioactive"
}
}
}

View File

@ -19,5 +19,5 @@
"documentation": "https://www.home-assistant.io/integrations/aranet",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["aranet4==2.3.4"]
"requirements": ["aranet4==2.4.0"]
}

View File

@ -99,6 +99,13 @@ SENSOR_DESCRIPTIONS = {
suggested_display_precision=4,
scale=0.000001,
),
"radon_concentration": AranetSensorEntityDescription(
key="radon_concentration",
translation_key="radon_concentration",
name="Radon Concentration",
native_unit_of_measurement="Bq/m³",
state_class=SensorStateClass.MEASUREMENT,
),
"battery": AranetSensorEntityDescription(
key="battery",
name="Battery",

View File

@ -11,12 +11,10 @@ from arcam.fmj.client import Client
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PORT, Platform
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import (
DEFAULT_SCAN_INTERVAL,
DOMAIN,
SIGNAL_CLIENT_DATA,
SIGNAL_CLIENT_STARTED,
SIGNAL_CLIENT_STOPPED,
@ -26,7 +24,6 @@ type ArcamFmjConfigEntry = ConfigEntry[Client]
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
PLATFORMS = [Platform.MEDIA_PLAYER]

View File

@ -87,8 +87,6 @@ def setup_platform(
if value_template is None:
return lambda value: value
value_template.hass = hass
def _render(value):
try:
return value_template.async_render({"value": value}, parse_result=False)

View File

@ -0,0 +1 @@
"""Virtual integration: ArtSound."""

View File

@ -0,0 +1,6 @@
{
"domain": "artsound",
"name": "ArtSound",
"integration_type": "virtual",
"supported_by": "linkplay"
}

View File

@ -101,7 +101,7 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN):
)
async def async_step_reauth(
self, user_input: Mapping[str, Any]
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth upon an API authentication error."""
@ -109,10 +109,10 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN):
self.context["entry_id"]
)
return await self.async_step_reauth_confirm(user_input)
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: Mapping | None = None
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Dialog that informs the user that reauth is required."""

View File

@ -5,6 +5,7 @@ from dataclasses import dataclass
import logging
from pymicro_vad import MicroVad
from pyspeex_noise import AudioProcessor
from .const import BYTES_PER_CHUNK
@ -41,8 +42,8 @@ class AudioEnhancer(ABC):
"""Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples."""
class MicroVadEnhancer(AudioEnhancer):
"""Audio enhancer that just runs microVAD."""
class MicroVadSpeexEnhancer(AudioEnhancer):
"""Audio enhancer that runs microVAD and speex."""
def __init__(
self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool
@ -50,6 +51,24 @@ class MicroVadEnhancer(AudioEnhancer):
"""Initialize audio enhancer."""
super().__init__(auto_gain, noise_suppression, is_vad_enabled)
self.audio_processor: AudioProcessor | None = None
# Scale from 0-4
self.noise_suppression = noise_suppression * -15
# Scale from 0-31
self.auto_gain = auto_gain * 300
if (self.auto_gain != 0) or (self.noise_suppression != 0):
self.audio_processor = AudioProcessor(
self.auto_gain, self.noise_suppression
)
_LOGGER.debug(
"Initialized speex with auto_gain=%s, noise_suppression=%s",
self.auto_gain,
self.noise_suppression,
)
self.vad: MicroVad | None = None
self.threshold = 0.5
@ -61,12 +80,17 @@ class MicroVadEnhancer(AudioEnhancer):
"""Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples."""
is_speech: bool | None = None
assert len(audio) == BYTES_PER_CHUNK
if self.vad is not None:
# Run VAD
assert len(audio) == BYTES_PER_CHUNK
speech_prob = self.vad.Process10ms(audio)
is_speech = speech_prob > self.threshold
if self.audio_processor is not None:
# Run noise suppression and auto gain
audio = self.audio_processor.Process10ms(audio).audio
return EnhancedAudioChunk(
audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech
)
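As a rough worked example of the scaling in __init__ above: a user-facing noise_suppression level of 2 maps to 2 * -15 = -30 (speex expects a negative suppression value, likely in dB), and an auto_gain level of 15 maps to 15 * 300 = 4500 (the speex AGC level); when both user levels are 0, no AudioProcessor is created and the chunk is returned unprocessed.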

View File

@ -7,5 +7,5 @@
"integration_type": "system",
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": ["pymicro-vad==1.0.1"]
"requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.2"]
}

View File

@ -49,7 +49,7 @@ from homeassistant.util import (
)
from homeassistant.util.limited_size_dict import LimitedSizeDict
from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadEnhancer
from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
from .const import (
BYTES_PER_CHUNK,
CONF_DEBUG_RECORDING_DIR,
@ -589,7 +589,7 @@ class PipelineRun:
# Initialize with audio settings
if self.audio_settings.needs_processor and (self.audio_enhancer is None):
# Default audio enhancer
self.audio_enhancer = MicroVadEnhancer(
self.audio_enhancer = MicroVadSpeexEnhancer(
self.audio_settings.auto_gain_dbfs,
self.audio_settings.noise_suppression_level,
self.audio_settings.is_vad_enabled,

View File

@ -6,13 +6,11 @@ from collections.abc import Callable, Iterable
from dataclasses import dataclass
from enum import StrEnum
import logging
from typing import Final
from .const import SAMPLE_CHANNELS, SAMPLE_RATE, SAMPLE_WIDTH
_LOGGER = logging.getLogger(__name__)
_SAMPLE_RATE: Final = 16000 # Hz
_SAMPLE_WIDTH: Final = 2 # bytes
class VadSensitivity(StrEnum):
"""How quickly the end of a voice command is detected."""
@ -26,12 +24,12 @@ class VadSensitivity(StrEnum):
"""Return seconds of silence for sensitivity level."""
sensitivity = VadSensitivity(sensitivity)
if sensitivity == VadSensitivity.RELAXED:
return 2.0
return 1.25
if sensitivity == VadSensitivity.AGGRESSIVE:
return 0.5
return 0.25
return 1.0
return 0.7
class AudioBuffer:
@ -80,7 +78,10 @@ class VoiceCommandSegmenter:
speech_seconds: float = 0.3
"""Seconds of speech before voice command has started."""
silence_seconds: float = 1.0
command_seconds: float = 1.0
"""Minimum number of seconds for a voice command."""
silence_seconds: float = 0.7
"""Seconds of silence after voice command has ended."""
timeout_seconds: float = 15.0
@ -92,9 +93,15 @@ class VoiceCommandSegmenter:
in_command: bool = False
"""True if inside voice command."""
timed_out: bool = False
"""True a timeout occurred during voice command."""
_speech_seconds_left: float = 0.0
"""Seconds left before considering voice command as started."""
_command_seconds_left: float = 0.0
"""Seconds left before voice command could stop."""
_silence_seconds_left: float = 0.0
"""Seconds left before considering voice command as stopped."""
@ -111,6 +118,7 @@ class VoiceCommandSegmenter:
def reset(self) -> None:
"""Reset all counters and state."""
self._speech_seconds_left = self.speech_seconds
self._command_seconds_left = self.command_seconds - self.speech_seconds
self._silence_seconds_left = self.silence_seconds
self._timeout_seconds_left = self.timeout_seconds
self._reset_seconds_left = self.reset_seconds
@ -121,6 +129,9 @@ class VoiceCommandSegmenter:
Returns False when command is done.
"""
if self.timed_out:
self.timed_out = False
self._timeout_seconds_left -= chunk_seconds
if self._timeout_seconds_left <= 0:
_LOGGER.warning(
@ -128,6 +139,7 @@ class VoiceCommandSegmenter:
self.timeout_seconds,
)
self.reset()
self.timed_out = True
return False
if not self.in_command:
@ -137,6 +149,9 @@ class VoiceCommandSegmenter:
if self._speech_seconds_left <= 0:
# Inside voice command
self.in_command = True
self._command_seconds_left = (
self.command_seconds - self.speech_seconds
)
self._silence_seconds_left = self.silence_seconds
_LOGGER.debug("Voice command started")
else:
@ -149,7 +164,8 @@ class VoiceCommandSegmenter:
# Silence in command
self._reset_seconds_left = self.reset_seconds
self._silence_seconds_left -= chunk_seconds
if self._silence_seconds_left <= 0:
self._command_seconds_left -= chunk_seconds
if (self._silence_seconds_left <= 0) and (self._command_seconds_left <= 0):
# Command finished successfully
self.reset()
_LOGGER.debug("Voice command finished")
@ -158,6 +174,7 @@ class VoiceCommandSegmenter:
# Speech in command.
# Reset silence counter if enough speech.
self._reset_seconds_left -= chunk_seconds
self._command_seconds_left -= chunk_seconds
if self._reset_seconds_left <= 0:
self._silence_seconds_left = self.silence_seconds
self._reset_seconds_left = self.reset_seconds
@ -179,7 +196,9 @@ class VoiceCommandSegmenter:
"""
if vad_samples_per_chunk is None:
# No chunking
chunk_seconds = (len(chunk) // _SAMPLE_WIDTH) / _SAMPLE_RATE
chunk_seconds = (
len(chunk) // (SAMPLE_WIDTH * SAMPLE_CHANNELS)
) / SAMPLE_RATE
is_speech = vad_is_speech(chunk)
return self.process(chunk_seconds, is_speech)
@ -187,8 +206,8 @@ class VoiceCommandSegmenter:
raise ValueError("leftover_chunk_buffer is required when vad uses chunking")
# With chunking
seconds_per_chunk = vad_samples_per_chunk / _SAMPLE_RATE
bytes_per_chunk = vad_samples_per_chunk * _SAMPLE_WIDTH
seconds_per_chunk = vad_samples_per_chunk / SAMPLE_RATE
bytes_per_chunk = vad_samples_per_chunk * (SAMPLE_WIDTH * SAMPLE_CHANNELS)
for vad_chunk in chunk_samples(chunk, bytes_per_chunk, leftover_chunk_buffer):
is_speech = vad_is_speech(vad_chunk)
if not self.process(seconds_per_chunk, is_speech):
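A minimal sketch of driving the segmenter above, assuming 10 ms chunks whose speech decisions already come from a VAD; the input list is invented for illustration:

# Hypothetical driver for VoiceCommandSegmenter (10 ms chunks assumed).
segmenter = VoiceCommandSegmenter()
for is_speech in vad_decisions:  # e.g. [False, True, True, ..., False, False]
    if not segmenter.process(0.01, is_speech):
        # process() returns False once the command has ended (enough trailing
        # silence after at least command_seconds of audio) or on timeout.
        break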

View File

@ -1 +0,0 @@
"""The asterisk_cdr component."""

View File

@ -1,70 +0,0 @@
"""Support for the Asterisk CDR interface."""
from __future__ import annotations
import datetime
import hashlib
from typing import Any
from homeassistant.components.asterisk_mbox import (
DOMAIN as ASTERISK_DOMAIN,
SIGNAL_CDR_UPDATE,
)
from homeassistant.components.mailbox import Mailbox
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
MAILBOX_NAME = "asterisk_cdr"
async def async_get_handler(
hass: HomeAssistant,
config: ConfigType,
discovery_info: DiscoveryInfoType | None = None,
) -> Mailbox:
"""Set up the Asterix CDR platform."""
return AsteriskCDR(hass, MAILBOX_NAME)
class AsteriskCDR(Mailbox):
"""Asterisk VM Call Data Record mailbox."""
def __init__(self, hass: HomeAssistant, name: str) -> None:
"""Initialize Asterisk CDR."""
super().__init__(hass, name)
self.cdr: list[dict[str, Any]] = []
async_dispatcher_connect(self.hass, SIGNAL_CDR_UPDATE, self._update_callback)
@callback
def _update_callback(self, msg: list[dict[str, Any]]) -> Any:
"""Update the message count in HA, if needed."""
self._build_message()
self.async_update()
def _build_message(self) -> None:
"""Build message structure."""
cdr: list[dict[str, Any]] = []
for entry in self.hass.data[ASTERISK_DOMAIN].cdr:
timestamp = datetime.datetime.strptime(
entry["time"], "%Y-%m-%d %H:%M:%S"
).timestamp()
info = {
"origtime": timestamp,
"callerid": entry["callerid"],
"duration": entry["duration"],
}
sha = hashlib.sha256(str(entry).encode("utf-8")).hexdigest()
msg = (
f"Destination: {entry['dest']}\n"
f"Application: {entry['application']}\n "
f"Context: {entry['context']}"
)
cdr.append({"info": info, "sha": sha, "text": msg})
self.cdr = cdr
async def async_get_messages(self) -> list[dict[str, Any]]:
"""Return a list of the current messages."""
if not self.cdr:
self._build_message()
return self.cdr

View File

@ -1,8 +0,0 @@
{
"domain": "asterisk_cdr",
"name": "Asterisk Call Detail Records",
"codeowners": [],
"dependencies": ["asterisk_mbox"],
"documentation": "https://www.home-assistant.io/integrations/asterisk_cdr",
"iot_class": "local_polling"
}

View File

@ -1,153 +0,0 @@
"""Support for Asterisk Voicemail interface."""
import logging
from typing import Any, cast
from asterisk_mbox import Client as asteriskClient
from asterisk_mbox.commands import (
CMD_MESSAGE_CDR,
CMD_MESSAGE_CDR_AVAILABLE,
CMD_MESSAGE_LIST,
)
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send, dispatcher_connect
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
from homeassistant.helpers.typing import ConfigType
_LOGGER = logging.getLogger(__name__)
DOMAIN = "asterisk_mbox"
SIGNAL_DISCOVER_PLATFORM = "asterisk_mbox.discover_platform"
SIGNAL_MESSAGE_REQUEST = "asterisk_mbox.message_request"
SIGNAL_MESSAGE_UPDATE = "asterisk_mbox.message_updated"
SIGNAL_CDR_UPDATE = "asterisk_mbox.message_updated"
SIGNAL_CDR_REQUEST = "asterisk_mbox.message_request"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_PORT): cv.port,
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up for the Asterisk Voicemail box."""
conf: dict[str, Any] = config[DOMAIN]
host: str = conf[CONF_HOST]
port: int = conf[CONF_PORT]
password: str = conf[CONF_PASSWORD]
hass.data[DOMAIN] = AsteriskData(hass, host, port, password, config)
create_issue(
hass,
DOMAIN,
"deprecated_integration",
breaks_in_ha_version="2024.9.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="deprecated_integration",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Asterisk Voicemail",
"mailbox": "mailbox",
},
)
return True
class AsteriskData:
"""Store Asterisk mailbox data."""
def __init__(
self,
hass: HomeAssistant,
host: str,
port: int,
password: str,
config: dict[str, Any],
) -> None:
"""Init the Asterisk data object."""
self.hass = hass
self.config = config
self.messages: list[dict[str, Any]] | None = None
self.cdr: list[dict[str, Any]] | None = None
dispatcher_connect(self.hass, SIGNAL_MESSAGE_REQUEST, self._request_messages)
dispatcher_connect(self.hass, SIGNAL_CDR_REQUEST, self._request_cdr)
dispatcher_connect(self.hass, SIGNAL_DISCOVER_PLATFORM, self._discover_platform)
# Only connect after signal connection to ensure we don't miss any
self.client = asteriskClient(host, port, password, self.handle_data)
@callback
def _discover_platform(self, component: str) -> None:
_LOGGER.debug("Adding mailbox %s", component)
self.hass.async_create_task(
discovery.async_load_platform(
self.hass, "mailbox", component, {}, self.config
)
)
@callback
def handle_data(
self, command: int, msg: list[dict[str, Any]] | dict[str, Any]
) -> None:
"""Handle changes to the mailbox."""
if command == CMD_MESSAGE_LIST:
msg = cast(list[dict[str, Any]], msg)
_LOGGER.debug("AsteriskVM sent updated message list: Len %d", len(msg))
old_messages = self.messages
self.messages = sorted(
msg, key=lambda item: item["info"]["origtime"], reverse=True
)
if not isinstance(old_messages, list):
async_dispatcher_send(self.hass, SIGNAL_DISCOVER_PLATFORM, DOMAIN)
async_dispatcher_send(self.hass, SIGNAL_MESSAGE_UPDATE, self.messages)
elif command == CMD_MESSAGE_CDR:
msg = cast(dict[str, Any], msg)
_LOGGER.debug(
"AsteriskVM sent updated CDR list: Len %d", len(msg.get("entries", []))
)
self.cdr = msg["entries"]
async_dispatcher_send(self.hass, SIGNAL_CDR_UPDATE, self.cdr)
elif command == CMD_MESSAGE_CDR_AVAILABLE:
if not isinstance(self.cdr, list):
_LOGGER.debug("AsteriskVM adding CDR platform")
self.cdr = []
async_dispatcher_send(
self.hass, SIGNAL_DISCOVER_PLATFORM, "asterisk_cdr"
)
async_dispatcher_send(self.hass, SIGNAL_CDR_REQUEST)
else:
_LOGGER.debug(
"AsteriskVM sent unknown message '%d' len: %d", command, len(msg)
)
@callback
def _request_messages(self) -> None:
"""Handle changes to the mailbox."""
_LOGGER.debug("Requesting message list")
self.client.messages()
@callback
def _request_cdr(self) -> None:
"""Handle changes to the CDR."""
_LOGGER.debug("Requesting CDR list")
self.client.get_cdr()

View File

@ -1,86 +0,0 @@
"""Support for the Asterisk Voicemail interface."""
from __future__ import annotations
from functools import partial
import logging
from typing import Any
from asterisk_mbox import ServerError
from homeassistant.components.mailbox import CONTENT_TYPE_MPEG, Mailbox, StreamError
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from . import DOMAIN as ASTERISK_DOMAIN, AsteriskData
_LOGGER = logging.getLogger(__name__)
SIGNAL_MESSAGE_REQUEST = "asterisk_mbox.message_request"
SIGNAL_MESSAGE_UPDATE = "asterisk_mbox.message_updated"
async def async_get_handler(
hass: HomeAssistant,
config: ConfigType,
discovery_info: DiscoveryInfoType | None = None,
) -> Mailbox:
"""Set up the Asterix VM platform."""
return AsteriskMailbox(hass, ASTERISK_DOMAIN)
class AsteriskMailbox(Mailbox):
"""Asterisk VM Sensor."""
def __init__(self, hass: HomeAssistant, name: str) -> None:
"""Initialize Asterisk mailbox."""
super().__init__(hass, name)
async_dispatcher_connect(
self.hass, SIGNAL_MESSAGE_UPDATE, self._update_callback
)
@callback
def _update_callback(self, msg: str) -> None:
"""Update the message count in HA, if needed."""
self.async_update()
@property
def media_type(self) -> str:
"""Return the supported media type."""
return CONTENT_TYPE_MPEG
@property
def can_delete(self) -> bool:
"""Return if messages can be deleted."""
return True
@property
def has_media(self) -> bool:
"""Return if messages have attached media files."""
return True
async def async_get_media(self, msgid: str) -> bytes:
"""Return the media blob for the msgid."""
data: AsteriskData = self.hass.data[ASTERISK_DOMAIN]
client = data.client
try:
return await self.hass.async_add_executor_job(
partial(client.mp3, msgid, sync=True)
)
except ServerError as err:
raise StreamError(err) from err
async def async_get_messages(self) -> list[dict[str, Any]]:
"""Return a list of the current messages."""
data: AsteriskData = self.hass.data[ASTERISK_DOMAIN]
return data.messages or []
async def async_delete(self, msgid: str) -> bool:
"""Delete the specified messages."""
data: AsteriskData = self.hass.data[ASTERISK_DOMAIN]
client = data.client
_LOGGER.info("Deleting: %s", msgid)
await self.hass.async_add_executor_job(client.delete, msgid)
return True

View File

@ -1,9 +0,0 @@
{
"domain": "asterisk_mbox",
"name": "Asterisk Voicemail",
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/asterisk_mbox",
"iot_class": "local_push",
"loggers": ["asterisk_mbox"],
"requirements": ["asterisk_mbox==0.5.0"]
}

View File

@ -1,8 +0,0 @@
{
"issues": {
"deprecated_integration": {
"title": "The {integration_title} is being removed",
"description": "{integration_title} is being removed as the `{mailbox}` platform is being removed and {integration_title} supports no other platforms. Remove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue."
}
}
}

View File

@ -52,7 +52,7 @@ SENSORS_TYPE_LOAD_AVG = "sensors_load_avg"
SENSORS_TYPE_RATES = "sensors_rates"
SENSORS_TYPE_TEMPERATURES = "sensors_temperatures"
WrtDevice = namedtuple("WrtDevice", ["ip", "name", "connected_to"])
WrtDevice = namedtuple("WrtDevice", ["ip", "name", "connected_to"]) # noqa: PYI024
_LOGGER = logging.getLogger(__name__)

View File

@ -6,15 +6,16 @@ from pathlib import Path
from typing import cast
from aiohttp import ClientResponseError
from yalexs.const import Brand
from yalexs.exceptions import AugustApiAIOHTTPError
from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation
from yalexs.manager.gateway import Config as YaleXSConfig
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers import device_registry as dr, issue_registry as ir
from .const import DOMAIN, PLATFORMS
from .data import AugustData
@ -24,7 +25,27 @@ from .util import async_create_august_clientsession
type AugustConfigEntry = ConfigEntry[AugustData]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@callback
def _async_create_yale_brand_migration_issue(
hass: HomeAssistant, entry: AugustConfigEntry
) -> None:
"""Create an issue for a brand migration."""
ir.async_create_issue(
hass,
DOMAIN,
"yale_brand_migration",
breaks_in_ha_version="2024.9",
learn_more_url="https://www.home-assistant.io/integrations/yale",
translation_key="yale_brand_migration",
is_fixable=False,
severity=ir.IssueSeverity.CRITICAL,
translation_placeholders={
"migrate_url": "https://my.home-assistant.io/redirect/config_flow_start?domain=yale"
},
)
async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bool:
"""Set up August from a config entry."""
session = async_create_august_clientsession(hass)
august_gateway = AugustGateway(Path(hass.config.config_dir), session)
@ -40,6 +61,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
return True
async def async_remove_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> None:
"""Remove an August config entry."""
ir.async_delete_issue(hass, DOMAIN, "yale_brand_migration")
async def async_unload_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@ -51,6 +77,8 @@ async def async_setup_august(
"""Set up the August component."""
config = cast(YaleXSConfig, entry.data)
await august_gateway.async_setup(config)
if august_gateway.api.brand == Brand.YALE_HOME:
_async_create_yale_brand_migration_issue(hass, entry)
await august_gateway.async_authenticate()
await august_gateway.async_refresh_access_token_if_needed()
data = entry.runtime_data = AugustData(hass, august_gateway)

Some files were not shown because too many files have changed in this diff.