Mirror of https://github.com/home-assistant/core.git (synced 2025-09-20 18:39:40 +00:00)

Compare commits: sql-query-... vs. 2024.8.3 (109 commits)
516f3295bf
2d5289e7dd
18efd84a35
b34c90b189
a45c1a3914
1bdf9d657e
b294a92ad2
2db362ab3d
5f275a6b9c
fa914b2811
a128e2e4fc
03c7f2cf5b
102528e5d3
8f4af4f7c2
667af10017
e5a64a1e0a
236fa8e238
70a58a0bb0
769c7f1ea3
5a8045d1fb
5a73b636e3
524e09b45e
1f46670266
a857f603c8
b7d8f3d005
129035967b
45b44f8a59
e80dc52175
22bb3e5477
f89e8e6ceb
157a61845b
0fcdc3c200
d1f09ecd0c
3484ab3c0c
80df582ebd
dc967e2ef2
e2c1a38d87
94516de724
a2027fc78c
be5577c2f9
93dc08a05f
def2ace4ec
4f0261d739
6103811de8
fd904c65a7
04bf8482b2
f5fd5e0457
0de89b42aa
e8914552b1
bfd302109e
796ad47dd0
e9915463a9
59aecda8cf
7d00ccbbbc
55a911120c
80abf90c87
8539591307
6234deeee1
81fabb1bfa
ff4e5859cf
f2e42eafc7
63f28ae2fe
5b6c6141c5
396ef7a642
17f59a5665
10846dc97b
17bb00727d
bc021dbbc6
e3cb9c0844
050e2c9404
5ea447ba48
a23b063922
c269d57259
d512f327c5
9bf8c5a54b
725e2f16f5
d98d0cdad0
e2f4aa893f
6b81fa89d3
c886587915
059d3eed98
f9ae2b4453
742c7ba23f
e7ae5c5c24
ae4fc9504a
2ef337ec2e
723b7bd532
4fdb11b0d8
fe2e6c37f4
4a75c55a8f
dfb59469cf
bdb2e1e2e9
c4f6f1e3d8
fb3eae54ea
d3f8fce788
44e58a8c87
3d3879b0db
a8b1eb34f3
fd77058def
b147ca6c5b
670c4cacfa
1ed0a89303
ab0597da7b
a3db6bc8fa
9bfc8f6e27
6fddef2dc5
ec08a85aa0
de7af575c5
d3831bae4e
@@ -146,7 +146,6 @@ requirements: &requirements
- homeassistant/package_constraints.txt
- requirements*.txt
- pyproject.toml
- script/licenses.py

any:
- *base_platforms
8
.github/workflows/builder.yml
vendored
@@ -69,7 +69,7 @@ jobs:
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

- name: Upload translations
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.3.4
with:
name: translations
path: translations.tar.gz
@@ -197,7 +197,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build base image
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2024.03.5
with:
args: |
$BUILD_ARGS \
@@ -263,7 +263,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build base image
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2024.03.5
with:
args: |
$BUILD_ARGS \
@@ -323,7 +323,7 @@ jobs:
uses: actions/checkout@v4.1.7

- name: Install Cosign
uses: sigstore/cosign-installer@v3.6.0
uses: sigstore/cosign-installer@v3.5.0
with:
cosign-release: "v2.2.3"
55
.github/workflows/ci.yaml
vendored
@@ -31,16 +31,12 @@ on:
|
||||
description: "Only run mypy"
|
||||
default: false
|
||||
type: boolean
|
||||
audit-licenses-only:
|
||||
description: "Only run audit licenses"
|
||||
default: false
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
CACHE_VERSION: 10
|
||||
CACHE_VERSION: 9
|
||||
UV_CACHE_VERSION: 1
|
||||
MYPY_CACHE_VERSION: 8
|
||||
HA_SHORT_VERSION: "2024.9"
|
||||
HA_SHORT_VERSION: "2024.8"
|
||||
DEFAULT_PYTHON: "3.12"
|
||||
ALL_PYTHON_VERSIONS: "['3.12']"
|
||||
# 10.3 is the oldest supported version
|
||||
@@ -226,7 +222,6 @@ jobs:
|
||||
if: |
|
||||
github.event.inputs.pylint-only != 'true'
|
||||
&& github.event.inputs.mypy-only != 'true'
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
needs:
|
||||
- info
|
||||
steps:
|
||||
@@ -348,7 +343,6 @@ jobs:
|
||||
pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
|
||||
env:
|
||||
RUFF_OUTPUT_FORMAT: github
|
||||
|
||||
lint-other:
|
||||
name: Check other linters
|
||||
runs-on: ubuntu-24.04
|
||||
@@ -514,7 +508,8 @@ jobs:
|
||||
uv pip install -U "pip>=21.3.1" setuptools wheel
|
||||
uv pip install -r requirements.txt
|
||||
python -m script.gen_requirements_all ci
|
||||
uv pip install -r requirements_all_pytest.txt -r requirements_test.txt
|
||||
uv pip install -r requirements_all_pytest.txt
|
||||
uv pip install -r requirements_test.txt
|
||||
uv pip install -e . --config-settings editable_mode=compat
|
||||
|
||||
hassfest:
|
||||
@@ -523,7 +518,6 @@ jobs:
|
||||
if: |
|
||||
github.event.inputs.pylint-only != 'true'
|
||||
&& github.event.inputs.mypy-only != 'true'
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
needs:
|
||||
- info
|
||||
- base
|
||||
@@ -562,7 +556,6 @@ jobs:
|
||||
if: |
|
||||
github.event.inputs.pylint-only != 'true'
|
||||
&& github.event.inputs.mypy-only != 'true'
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
needs:
|
||||
- info
|
||||
- base
|
||||
@@ -596,10 +589,7 @@ jobs:
|
||||
- info
|
||||
- base
|
||||
if: |
|
||||
(github.event.inputs.pylint-only != 'true'
|
||||
&& github.event.inputs.mypy-only != 'true'
|
||||
|| github.event.inputs.audit-licenses-only == 'true')
|
||||
&& needs.info.outputs.requirements == 'true'
|
||||
needs.info.outputs.requirements == 'true'
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.1.7
|
||||
@@ -623,7 +613,7 @@ jobs:
|
||||
. venv/bin/activate
|
||||
pip-licenses --format=json --output-file=licenses.json
|
||||
- name: Upload licenses
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: licenses
|
||||
path: licenses.json
|
||||
@@ -638,7 +628,6 @@ jobs:
|
||||
timeout-minutes: 20
|
||||
if: |
|
||||
github.event.inputs.mypy-only != 'true'
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
|| github.event.inputs.pylint-only == 'true'
|
||||
needs:
|
||||
- info
|
||||
@@ -683,9 +672,7 @@ jobs:
|
||||
runs-on: ubuntu-24.04
|
||||
timeout-minutes: 20
|
||||
if: |
|
||||
(github.event.inputs.mypy-only != 'true'
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
|| github.event.inputs.pylint-only == 'true')
|
||||
(github.event.inputs.mypy-only != 'true' || github.event.inputs.pylint-only == 'true')
|
||||
&& (needs.info.outputs.tests_glob || needs.info.outputs.test_full_suite == 'true')
|
||||
needs:
|
||||
- info
|
||||
@@ -716,21 +703,20 @@ jobs:
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
python --version
|
||||
pylint tests
|
||||
pylint --ignore-missing-annotations=y tests
|
||||
- name: Run pylint (partially)
|
||||
if: needs.info.outputs.test_full_suite == 'false'
|
||||
shell: bash
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
python --version
|
||||
pylint tests/components/${{ needs.info.outputs.tests_glob }}
|
||||
pylint --ignore-missing-annotations=y tests/components/${{ needs.info.outputs.tests_glob }}
|
||||
|
||||
mypy:
|
||||
name: Check mypy
|
||||
runs-on: ubuntu-24.04
|
||||
if: |
|
||||
github.event.inputs.pylint-only != 'true'
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
|| github.event.inputs.mypy-only == 'true'
|
||||
needs:
|
||||
- info
|
||||
@@ -795,7 +781,6 @@ jobs:
|
||||
&& github.event.inputs.lint-only != 'true'
|
||||
&& github.event.inputs.pylint-only != 'true'
|
||||
&& github.event.inputs.mypy-only != 'true'
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
&& needs.info.outputs.test_full_suite == 'true'
|
||||
needs:
|
||||
- info
|
||||
@@ -833,7 +818,7 @@ jobs:
|
||||
. venv/bin/activate
|
||||
python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
|
||||
- name: Upload pytest_buckets
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: pytest_buckets
|
||||
path: pytest_buckets.txt
|
||||
@@ -846,7 +831,6 @@ jobs:
|
||||
&& github.event.inputs.lint-only != 'true'
|
||||
&& github.event.inputs.pylint-only != 'true'
|
||||
&& github.event.inputs.mypy-only != 'true'
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
&& needs.info.outputs.test_full_suite == 'true'
|
||||
needs:
|
||||
- info
|
||||
@@ -934,14 +918,14 @@ jobs:
|
||||
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: pytest-*.txt
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: coverage.xml
|
||||
@@ -967,7 +951,6 @@ jobs:
|
||||
&& github.event.inputs.lint-only != 'true'
|
||||
&& github.event.inputs.pylint-only != 'true'
|
||||
&& github.event.inputs.mypy-only != 'true'
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
&& needs.info.outputs.mariadb_groups != '[]'
|
||||
needs:
|
||||
- info
|
||||
@@ -1060,7 +1043,7 @@ jobs:
|
||||
2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.mariadb }}
|
||||
@@ -1068,7 +1051,7 @@ jobs:
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.mariadb }}
|
||||
@@ -1093,7 +1076,6 @@ jobs:
|
||||
&& github.event.inputs.lint-only != 'true'
|
||||
&& github.event.inputs.pylint-only != 'true'
|
||||
&& github.event.inputs.mypy-only != 'true'
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
&& needs.info.outputs.postgresql_groups != '[]'
|
||||
needs:
|
||||
- info
|
||||
@@ -1187,7 +1169,7 @@ jobs:
|
||||
2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.postgresql }}
|
||||
@@ -1195,7 +1177,7 @@ jobs:
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.postgresql }}
|
||||
@@ -1238,7 +1220,6 @@ jobs:
|
||||
&& github.event.inputs.lint-only != 'true'
|
||||
&& github.event.inputs.pylint-only != 'true'
|
||||
&& github.event.inputs.mypy-only != 'true'
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
&& needs.info.outputs.tests_glob
|
||||
&& needs.info.outputs.test_full_suite == 'false'
|
||||
needs:
|
||||
@@ -1329,14 +1310,14 @@ jobs:
|
||||
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: pytest-*.txt
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: coverage.xml
|
||||
|
4
.github/workflows/codeql.yml
vendored
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.1.7

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.26.4
uses: github/codeql-action/init@v3.25.15
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.26.4
uses: github/codeql-action/analyze@v3.25.15
with:
category: "/language:python"
14
.github/workflows/wheels.yml
vendored
@@ -82,14 +82,14 @@ jobs:
|
||||
) > .env_file
|
||||
|
||||
- name: Upload env_file
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: env_file
|
||||
path: ./.env_file
|
||||
overwrite: true
|
||||
|
||||
- name: Upload requirements_diff
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: requirements_diff
|
||||
path: ./requirements_diff.txt
|
||||
@@ -101,7 +101,7 @@ jobs:
|
||||
python -m script.gen_requirements_all ci
|
||||
|
||||
- name: Upload requirements_all_wheels
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: requirements_all_wheels
|
||||
path: ./requirements_all_wheels_*.txt
|
||||
@@ -211,7 +211,7 @@ jobs:
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_old-cython.txt"
|
||||
@@ -226,7 +226,7 @@ jobs:
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_all.txtaa"
|
||||
@@ -240,7 +240,7 @@ jobs:
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_all.txtab"
|
||||
@@ -254,7 +254,7 @@ jobs:
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_all.txtac"
|
||||
|
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.6.2
rev: v0.5.5
hooks:
- id: ruff
args:
@@ -12,7 +12,7 @@ repos:
hooks:
- id: codespell
args:
- --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
- --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn,pres,ser,ue
- --skip="./.*,*.csv,*.json,*.ambr"
- --quiet-level=2
exclude_types: [csv, json, html]
@@ -95,6 +95,8 @@ homeassistant.components.aruba.*
|
||||
homeassistant.components.arwn.*
|
||||
homeassistant.components.aseko_pool_live.*
|
||||
homeassistant.components.assist_pipeline.*
|
||||
homeassistant.components.asterisk_cdr.*
|
||||
homeassistant.components.asterisk_mbox.*
|
||||
homeassistant.components.asuswrt.*
|
||||
homeassistant.components.autarco.*
|
||||
homeassistant.components.auth.*
|
||||
@@ -196,9 +198,7 @@ homeassistant.components.fritzbox.*
|
||||
homeassistant.components.fritzbox_callmonitor.*
|
||||
homeassistant.components.fronius.*
|
||||
homeassistant.components.frontend.*
|
||||
homeassistant.components.fujitsu_fglair.*
|
||||
homeassistant.components.fully_kiosk.*
|
||||
homeassistant.components.fyta.*
|
||||
homeassistant.components.generic_hygrostat.*
|
||||
homeassistant.components.generic_thermostat.*
|
||||
homeassistant.components.geo_location.*
|
||||
@@ -295,7 +295,6 @@ homeassistant.components.lookin.*
|
||||
homeassistant.components.luftdaten.*
|
||||
homeassistant.components.madvr.*
|
||||
homeassistant.components.mailbox.*
|
||||
homeassistant.components.manual.*
|
||||
homeassistant.components.map.*
|
||||
homeassistant.components.mastodon.*
|
||||
homeassistant.components.matrix.*
|
||||
|
15
CODEOWNERS
@@ -108,8 +108,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/anova/ @Lash-L
|
||||
/homeassistant/components/anthemav/ @hyralex
|
||||
/tests/components/anthemav/ @hyralex
|
||||
/homeassistant/components/anthropic/ @Shulyaka
|
||||
/tests/components/anthropic/ @Shulyaka
|
||||
/homeassistant/components/aosmith/ @bdr99
|
||||
/tests/components/aosmith/ @bdr99
|
||||
/homeassistant/components/apache_kafka/ @bachya
|
||||
@@ -349,8 +347,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/dremel_3d_printer/ @tkdrob
|
||||
/homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer
|
||||
/tests/components/drop_connect/ @ChandlerSystems @pfrazer
|
||||
/homeassistant/components/dsmr/ @Robbie1221
|
||||
/tests/components/dsmr/ @Robbie1221
|
||||
/homeassistant/components/dsmr/ @Robbie1221 @frenck
|
||||
/tests/components/dsmr/ @Robbie1221 @frenck
|
||||
/homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
|
||||
/tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
|
||||
/homeassistant/components/duotecno/ @cereal2nd
|
||||
@@ -433,7 +431,6 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/evil_genius_labs/ @balloob
|
||||
/tests/components/evil_genius_labs/ @balloob
|
||||
/homeassistant/components/evohome/ @zxdavb
|
||||
/tests/components/evohome/ @zxdavb
|
||||
/homeassistant/components/ezviz/ @RenierM26 @baqs
|
||||
/tests/components/ezviz/ @RenierM26 @baqs
|
||||
/homeassistant/components/faa_delays/ @ntilley905
|
||||
@@ -499,8 +496,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/frontend/ @home-assistant/frontend
|
||||
/homeassistant/components/frontier_silicon/ @wlcrs
|
||||
/tests/components/frontier_silicon/ @wlcrs
|
||||
/homeassistant/components/fujitsu_fglair/ @crevetor
|
||||
/tests/components/fujitsu_fglair/ @crevetor
|
||||
/homeassistant/components/fully_kiosk/ @cgarwood
|
||||
/tests/components/fully_kiosk/ @cgarwood
|
||||
/homeassistant/components/fyta/ @dontinelli
|
||||
@@ -828,6 +823,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/logbook/ @home-assistant/core
|
||||
/homeassistant/components/logger/ @home-assistant/core
|
||||
/tests/components/logger/ @home-assistant/core
|
||||
/homeassistant/components/logi_circle/ @evanjd
|
||||
/tests/components/logi_circle/ @evanjd
|
||||
/homeassistant/components/london_underground/ @jpbede
|
||||
/tests/components/london_underground/ @jpbede
|
||||
/homeassistant/components/lookin/ @ANMalko @bdraco
|
||||
@@ -970,8 +967,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/nfandroidtv/ @tkdrob
|
||||
/homeassistant/components/nibe_heatpump/ @elupus
|
||||
/tests/components/nibe_heatpump/ @elupus
|
||||
/homeassistant/components/nice_go/ @IceBotYT
|
||||
/tests/components/nice_go/ @IceBotYT
|
||||
/homeassistant/components/nightscout/ @marciogranzotto
|
||||
/tests/components/nightscout/ @marciogranzotto
|
||||
/homeassistant/components/nilu/ @hfurubotten
|
||||
@@ -1329,8 +1324,6 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/smarty/ @z0mbieprocess
|
||||
/homeassistant/components/smhi/ @gjohansson-ST
|
||||
/tests/components/smhi/ @gjohansson-ST
|
||||
/homeassistant/components/smlight/ @tl-sl
|
||||
/tests/components/smlight/ @tl-sl
|
||||
/homeassistant/components/sms/ @ocalvo
|
||||
/tests/components/sms/ @ocalvo
|
||||
/homeassistant/components/snapcast/ @luar123
|
||||
|
@@ -8,8 +8,6 @@ import glob
|
||||
from http.client import HTTPConnection
|
||||
import importlib
|
||||
import os
|
||||
from pathlib import Path
|
||||
from ssl import SSLContext
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
@@ -145,78 +143,6 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=SSLContext.load_default_certs,
|
||||
object=SSLContext,
|
||||
function="load_default_certs",
|
||||
check_allowed=None,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=SSLContext.load_verify_locations,
|
||||
object=SSLContext,
|
||||
function="load_verify_locations",
|
||||
check_allowed=None,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=SSLContext.load_cert_chain,
|
||||
object=SSLContext,
|
||||
function="load_cert_chain",
|
||||
check_allowed=None,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=Path.open,
|
||||
object=Path,
|
||||
function="open",
|
||||
check_allowed=_check_file_allowed,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=Path.read_text,
|
||||
object=Path,
|
||||
function="read_text",
|
||||
check_allowed=_check_file_allowed,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=Path.read_bytes,
|
||||
object=Path,
|
||||
function="read_bytes",
|
||||
check_allowed=_check_file_allowed,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=Path.write_text,
|
||||
object=Path,
|
||||
function="write_text",
|
||||
check_allowed=_check_file_allowed,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=Path.write_bytes,
|
||||
object=Path,
|
||||
function="write_bytes",
|
||||
check_allowed=_check_file_allowed,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
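The BlockingCall entries above register SSLContext and Path methods so they can be flagged when invoked from inside the running event loop. As a rough, hedged sketch of the general technique only (the wrapper below is illustrative and is not Home Assistant's actual implementation):

```python
import asyncio
import functools
from pathlib import Path


def report_blocking_call(func):
    """Wrap a blocking callable and report when it runs inside the event loop."""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            pass  # No running loop: calling the blocking function is fine.
        else:
            print(f"Blocking call to {func.__qualname__} inside the event loop")
        return func(*args, **kwargs)

    return wrapper


# Illustrative only: patch Path.open the same way the table above registers it.
Path.open = report_blocking_call(Path.open)
```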
@@ -586,10 +586,10 @@ async def async_enable_logging(
logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
logging.getLogger("httpx").setLevel(logging.WARNING)

sys.excepthook = lambda *args: logging.getLogger().exception(
sys.excepthook = lambda *args: logging.getLogger(None).exception(
"Uncaught exception", exc_info=args
)
threading.excepthook = lambda args: logging.getLogger().exception(
threading.excepthook = lambda args: logging.getLogger(None).exception(
"Uncaught thread exception",
exc_info=(  # type: ignore[arg-type]
args.exc_type,
@@ -616,9 +616,10 @@ async def async_enable_logging(
_create_log_file, err_log_path, log_rotate_days
)

err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))

logger = logging.getLogger()
logger = logging.getLogger("")
logger.addHandler(err_handler)
logger.setLevel(logging.INFO if verbose else logging.WARNING)
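For reference, the spellings exchanged in this hunk are interchangeable: in the standard library, `logging.getLogger()`, `logging.getLogger(None)`, and `logging.getLogger("")` all return the same root logger.

```python
import logging

root_no_arg = logging.getLogger()    # no argument
root_none = logging.getLogger(None)  # explicit None
root_empty = logging.getLogger("")   # empty name

# All three spellings resolve to the single root logger instance.
assert root_no_arg is root_none is root_empty
```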
5
homeassistant/brands/asterisk.json
Normal file
@@ -0,0 +1,5 @@
{
"domain": "asterisk",
"name": "Asterisk",
"integrations": ["asterisk_cdr", "asterisk_mbox"]
}
@@ -1,5 +0,0 @@
{
"domain": "fujitsu",
"name": "Fujitsu",
"integrations": ["fujitsu_anywair", "fujitsu_fglair"]
}
@@ -81,7 +81,7 @@ class AcerSwitch(SwitchEntity):
write_timeout: int,
) -> None:
"""Init of the Acer projector."""
self.serial = serial.Serial(
self.ser = serial.Serial(
port=serial_port, timeout=timeout, write_timeout=write_timeout
)
self._serial_port = serial_port
@@ -99,16 +99,16 @@ class AcerSwitch(SwitchEntity):
# was disconnected during runtime.
# This way the projector can be reconnected and will still work
try:
if not self.serial.is_open:
self.serial.open()
self.serial.write(msg.encode("utf-8"))
if not self.ser.is_open:
self.ser.open()
self.ser.write(msg.encode("utf-8"))
# Size is an experience value there is no real limit.
# AFAIK there is no limit and no end character so we will usually
# need to wait for timeout
ret = self.serial.read_until(size=20).decode("utf-8")
ret = self.ser.read_until(size=20).decode("utf-8")
except serial.SerialException:
_LOGGER.error("Problem communicating with %s", self._serial_port)
self.serial.close()
self.ser.close()
return ret

def _write_read_format(self, msg: str) -> str:
@@ -136,7 +136,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:


# Tuple to hold data needed for notification
NotificationItem = namedtuple(  # noqa: PYI024
NotificationItem = namedtuple(
"NotificationItem", "hnotify huser name plc_datatype callback"
)
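The `# noqa: PYI024` marker added above silences Ruff's rule that prefers `typing.NamedTuple` over `collections.namedtuple`. An equivalent annotated definition would look roughly like this; the field types are assumptions added for illustration, not taken from the integration:

```python
from typing import Any, Callable, NamedTuple


class NotificationItem(NamedTuple):
    """Data needed for a notification (field types are illustrative)."""

    hnotify: Any
    huser: Any
    name: str
    plc_datatype: Any
    callback: Callable[..., Any]
```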
@@ -59,7 +59,7 @@ async def async_setup_entry(

platform = async_get_current_platform()
for service, method in CAMERA_SERVICES.items():
platform.async_register_entity_service(service, None, method)
platform.async_register_entity_service(service, {}, method)


class AgentCamera(MjpegCamera):
@@ -21,7 +21,6 @@ PLATFORMS: list[Platform] = [
|
||||
Platform.SELECT,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
Platform.UPDATE,
|
||||
]
|
||||
|
||||
|
||||
|
@@ -92,7 +92,9 @@ class AirGradientConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
except AirGradientError:
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
await self.async_set_unique_id(current_measures.serial_number)
|
||||
await self.async_set_unique_id(
|
||||
current_measures.serial_number, raise_on_progress=False
|
||||
)
|
||||
self._abort_if_unique_id_configured()
|
||||
await self.set_configuration_source()
|
||||
return self.async_create_entry(
|
||||
|
@@ -1,55 +0,0 @@
|
||||
"""Airgradient Update platform."""
|
||||
|
||||
from datetime import timedelta
|
||||
from functools import cached_property
|
||||
|
||||
from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import AirGradientConfigEntry, AirGradientMeasurementCoordinator
|
||||
from .entity import AirGradientEntity
|
||||
|
||||
SCAN_INTERVAL = timedelta(hours=1)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: AirGradientConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Airgradient update platform."""
|
||||
|
||||
data = config_entry.runtime_data
|
||||
|
||||
async_add_entities([AirGradientUpdate(data.measurement)], True)
|
||||
|
||||
|
||||
class AirGradientUpdate(AirGradientEntity, UpdateEntity):
|
||||
"""Representation of Airgradient Update."""
|
||||
|
||||
_attr_device_class = UpdateDeviceClass.FIRMWARE
|
||||
coordinator: AirGradientMeasurementCoordinator
|
||||
|
||||
def __init__(self, coordinator: AirGradientMeasurementCoordinator) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = f"{coordinator.serial_number}-update"
|
||||
|
||||
@cached_property
|
||||
def should_poll(self) -> bool:
|
||||
"""Return True because we need to poll the latest version."""
|
||||
return True
|
||||
|
||||
@property
|
||||
def installed_version(self) -> str:
|
||||
"""Return the installed version of the entity."""
|
||||
return self.coordinator.data.firmware_version
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the entity."""
|
||||
self._attr_latest_version = (
|
||||
await self.coordinator.client.get_latest_firmware_version(
|
||||
self.coordinator.serial_number
|
||||
)
|
||||
)
|
@@ -1,11 +1,9 @@
|
||||
"""Config flow for AirTouch4."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from airtouch4pyapi import AirTouch, AirTouchStatus
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.config_entries import ConfigFlow
|
||||
from homeassistant.const import CONF_HOST
|
||||
|
||||
from .const import DOMAIN
|
||||
@@ -18,9 +16,7 @@ class AirtouchConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
async def async_step_user(self, user_input=None):
|
||||
"""Handle a flow initialized by the user."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA)
|
||||
|
@@ -31,6 +31,7 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers import (
|
||||
aiohttp_client,
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
)
|
||||
@@ -61,6 +62,8 @@ PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
DEFAULT_ATTRIBUTION = "Data provided by AirVisual"
|
||||
|
||||
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
|
||||
|
||||
|
||||
@callback
|
||||
def async_get_cloud_api_update_interval(
|
||||
|
@@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone",
"iot_class": "local_polling",
"loggers": ["aioairzone"],
"requirements": ["aioairzone==0.8.2"]
"requirements": ["aioairzone==0.8.1"]
}
@@ -161,11 +161,6 @@ class AirzoneBinarySensor(AirzoneEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: AirzoneBinarySensorEntityDescription
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return Airzone Cloud binary sensor availability."""
|
||||
return super().available and self.is_on is not None
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Update attributes when the coordinator updates."""
|
||||
|
@@ -189,11 +189,6 @@ async def async_setup_entry(
|
||||
class AirzoneSensor(AirzoneEntity, SensorEntity):
|
||||
"""Define an Airzone Cloud sensor."""
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return Airzone Cloud sensor availability."""
|
||||
return super().available and self.native_value is not None
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Update attributes when the coordinator updates."""
|
||||
|
@@ -124,9 +124,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
if not entities:
return False

component.async_register_entity_service(SERVICE_TURN_OFF, None, "async_turn_off")
component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on")
component.async_register_entity_service(SERVICE_TOGGLE, None, "async_toggle")
component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off")
component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on")
component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle")

await component.async_add_entities(entities)
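The change above swaps an empty schema dict for `None` when registering entity services that take no fields. A minimal sketch of the same call from inside a platform's `async_setup_entry` follows; the service name "example_refresh" and handler "async_refresh" are placeholders, not part of the integrations touched here:

```python
from homeassistant.helpers.entity_platform import async_get_current_platform

# Inside an integration's async_setup_entry for a platform:
platform = async_get_current_platform()
# Passing None instead of {} signals that the service accepts no parameters.
platform.async_register_entity_service("example_refresh", None, "async_refresh")
```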
@@ -162,8 +162,16 @@ class Alert(Entity):
self._data = data

self._message_template = message_template
if self._message_template is not None:
self._message_template.hass = hass

self._done_message_template = done_message_template
if self._done_message_template is not None:
self._done_message_template.hass = hass

self._title_template = title_template
if self._title_template is not None:
self._title_template.hass = hass

self._notifiers = notifiers
self._can_ack = can_ack
@@ -661,9 +661,12 @@ class RemoteCapabilities(AlexaEntity):
def interfaces(self) -> Generator[AlexaCapability]:
"""Yield the supported interfaces."""
yield AlexaPowerController(self.entity)
yield AlexaModeController(
self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}"
)
supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
activities = self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST) or []
if activities and supported & remote.RemoteEntityFeature.ACTIVITY:
yield AlexaModeController(
self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}"
)
yield AlexaEndpointHealth(self.hass, self.entity)
yield Alexa(self.entity)
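The added guard uses the usual bitwise test against an IntFlag-style feature mask before yielding the activity mode controller. A generic illustration of that pattern follows; the flag names and values are an illustrative stand-in, not Home Assistant's actual RemoteEntityFeature definition:

```python
from enum import IntFlag


class RemoteFeature(IntFlag):
    """Illustrative feature mask in the style of RemoteEntityFeature."""

    LEARN_COMMAND = 1
    DELETE_COMMAND = 2
    ACTIVITY = 4


supported = RemoteFeature.LEARN_COMMAND | RemoteFeature.ACTIVITY

# Same shape as the check in the hunk above: only expose the activity
# mode controller when the ACTIVITY bit is set in the supported mask.
if supported & RemoteFeature.ACTIVITY:
    print("Activity selection is supported")
```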
@@ -52,6 +52,7 @@ class AlexaFlashBriefingView(http.HomeAssistantView):
|
||||
"""Initialize Alexa view."""
|
||||
super().__init__()
|
||||
self.flash_briefings = flash_briefings
|
||||
template.attach(hass, self.flash_briefings)
|
||||
|
||||
@callback
|
||||
def get(
|
||||
|
@@ -1206,7 +1206,7 @@ async def async_api_set_mode(
|
||||
raise AlexaInvalidValueError(msg)
|
||||
|
||||
# Remote Activity
|
||||
elif instance == f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}":
|
||||
if instance == f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}":
|
||||
activity = mode.split(".")[1]
|
||||
activities: list[str] | None = entity.attributes.get(remote.ATTR_ACTIVITY_LIST)
|
||||
if activity != PRESET_MODE_NA and activities and activity in activities:
|
||||
|
@@ -283,7 +283,7 @@ class AlexaPresetResource(AlexaCapabilityResource):
|
||||
"""Implements Alexa PresetResources.
|
||||
|
||||
Use presetResources with RangeController to provide a set of
|
||||
friendlyNames for each RangeController preset.
|
||||
friendlyNamesfor each RangeController preset.
|
||||
|
||||
https://developer.amazon.com/docs/device-apis/resources-and-assets.html#presetresources
|
||||
"""
|
||||
|
@@ -194,7 +194,7 @@ async def async_handle_message(
|
||||
|
||||
try:
|
||||
if not enabled:
|
||||
raise AlexaBridgeUnreachableError( # noqa: TRY301
|
||||
raise AlexaBridgeUnreachableError(
|
||||
"Alexa API not enabled in Home Assistant configuration"
|
||||
)
|
||||
|
||||
|
@@ -8,23 +8,128 @@ CONF_REGION: Final = "region_name"
|
||||
CONF_ACCESS_KEY_ID: Final = "aws_access_key_id"
|
||||
CONF_SECRET_ACCESS_KEY: Final = "aws_secret_access_key"
|
||||
|
||||
DEFAULT_REGION: Final = "us-east-1"
|
||||
SUPPORTED_REGIONS: Final[list[str]] = [
|
||||
"us-east-1",
|
||||
"us-east-2",
|
||||
"us-west-1",
|
||||
"us-west-2",
|
||||
"ca-central-1",
|
||||
"eu-west-1",
|
||||
"eu-central-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-northeast-2",
|
||||
"ap-northeast-1",
|
||||
"ap-south-1",
|
||||
"sa-east-1",
|
||||
]
|
||||
|
||||
CONF_ENGINE: Final = "engine"
|
||||
CONF_VOICE: Final = "voice"
|
||||
CONF_OUTPUT_FORMAT: Final = "output_format"
|
||||
CONF_SAMPLE_RATE: Final = "sample_rate"
|
||||
CONF_TEXT_TYPE: Final = "text_type"
|
||||
|
||||
SUPPORTED_OUTPUT_FORMATS: Final[set[str]] = {"mp3", "ogg_vorbis", "pcm"}
|
||||
SUPPORTED_VOICES: Final[list[str]] = [
|
||||
"Aditi", # Hindi
|
||||
"Amy", # English (British)
|
||||
"Aria", # English (New Zealand), Neural
|
||||
"Arlet", # Catalan, Neural
|
||||
"Arthur", # English, Neural
|
||||
"Astrid", # Swedish
|
||||
"Ayanda", # English (South African), Neural
|
||||
"Bianca", # Italian
|
||||
"Brian", # English (British)
|
||||
"Camila", # Portuguese, Brazilian
|
||||
"Carla", # Italian
|
||||
"Carmen", # Romanian
|
||||
"Celine", # French
|
||||
"Chantal", # French Canadian
|
||||
"Conchita", # Spanish (European)
|
||||
"Cristiano", # Portuguese (European)
|
||||
"Daniel", # German, Neural
|
||||
"Dora", # Icelandic
|
||||
"Elin", # Swedish, Neural
|
||||
"Emma", # English
|
||||
"Enrique", # Spanish (European)
|
||||
"Ewa", # Polish
|
||||
"Filiz", # Turkish
|
||||
"Gabrielle", # French (Canadian)
|
||||
"Geraint", # English Welsh
|
||||
"Giorgio", # Italian
|
||||
"Gwyneth", # Welsh
|
||||
"Hala", # Arabic (Gulf), Neural
|
||||
"Hannah", # German (Austrian), Neural
|
||||
"Hans", # German
|
||||
"Hiujin", # Chinese (Cantonese), Neural
|
||||
"Ida", # Norwegian, Neural
|
||||
"Ines", # Portuguese, European # codespell:ignore ines
|
||||
"Ivy", # English
|
||||
"Jacek", # Polish
|
||||
"Jan", # Polish
|
||||
"Joanna", # English
|
||||
"Joey", # English
|
||||
"Justin", # English
|
||||
"Kajal", # English (Indian)/Hindi (Bilingual ), Neural
|
||||
"Karl", # Icelandic
|
||||
"Kendra", # English
|
||||
"Kevin", # English, Neural
|
||||
"Kimberly", # English
|
||||
"Laura", # Dutch, Neural
|
||||
"Lea", # French
|
||||
"Liam", # Canadian French, Neural
|
||||
"Liv", # Norwegian
|
||||
"Lotte", # Dutch
|
||||
"Lucia", # Spanish European
|
||||
"Lupe", # Spanish US
|
||||
"Mads", # Danish
|
||||
"Maja", # Polish
|
||||
"Marlene", # German
|
||||
"Mathieu", # French
|
||||
"Matthew", # English
|
||||
"Maxim", # Russian
|
||||
"Mia", # Spanish Mexican
|
||||
"Miguel", # Spanish US
|
||||
"Mizuki", # Japanese
|
||||
"Naja", # Danish
|
||||
"Nicole", # English Australian
|
||||
"Ola", # Polish, Neural
|
||||
"Olivia", # Female, Australian, Neural
|
||||
"Penelope", # Spanish US
|
||||
"Pedro", # Spanish US, Neural
|
||||
"Raveena", # English, Indian
|
||||
"Ricardo", # Portuguese (Brazilian)
|
||||
"Ruben", # Dutch
|
||||
"Russell", # English (Australian)
|
||||
"Ruth", # English, Neural
|
||||
"Salli", # English
|
||||
"Seoyeon", # Korean
|
||||
"Stephen", # English, Neural
|
||||
"Suvi", # Finnish
|
||||
"Takumi", # Japanese
|
||||
"Tatyana", # Russian
|
||||
"Vicki", # German
|
||||
"Vitoria", # Portuguese, Brazilian
|
||||
"Zeina", # Arabic
|
||||
"Zhiyu", # Chinese
|
||||
]
|
||||
|
||||
SUPPORTED_SAMPLE_RATES: Final[set[str]] = {"8000", "16000", "22050", "24000"}
SUPPORTED_OUTPUT_FORMATS: Final[list[str]] = ["mp3", "ogg_vorbis", "pcm"]

SUPPORTED_SAMPLE_RATES_MAP: Final[dict[str, set[str]]] = {
"mp3": {"8000", "16000", "22050", "24000"},
"ogg_vorbis": {"8000", "16000", "22050"},
"pcm": {"8000", "16000"},
SUPPORTED_ENGINES: Final[list[str]] = ["neural", "standard"]

SUPPORTED_SAMPLE_RATES: Final[list[str]] = ["8000", "16000", "22050", "24000"]

SUPPORTED_SAMPLE_RATES_MAP: Final[dict[str, list[str]]] = {
"mp3": ["8000", "16000", "22050", "24000"],
"ogg_vorbis": ["8000", "16000", "22050"],
"pcm": ["8000", "16000"],
}

SUPPORTED_TEXT_TYPES: Final[set[str]] = {"text", "ssml"}
SUPPORTED_TEXT_TYPES: Final[list[str]] = ["text", "ssml"]

CONTENT_TYPE_EXTENSIONS: Final[dict[str, str]] = {
"audio/mpeg": "mp3",
@@ -32,8 +137,6 @@ CONTENT_TYPE_EXTENSIONS: Final[dict[str, str]] = {
"audio/pcm": "pcm",
}

DEFAULT_REGION: Final = "us-east-1"

DEFAULT_ENGINE: Final = "standard"
DEFAULT_VOICE: Final = "Joanna"
DEFAULT_OUTPUT_FORMAT: Final = "mp3"
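SUPPORTED_SAMPLE_RATES_MAP ties each output format to the sample rates it accepts. A small usage sketch built only on the constants shown above; the helper function is illustrative, not part of the integration:

```python
from typing import Final

SUPPORTED_SAMPLE_RATES_MAP: Final[dict[str, list[str]]] = {
    "mp3": ["8000", "16000", "22050", "24000"],
    "ogg_vorbis": ["8000", "16000", "22050"],
    "pcm": ["8000", "16000"],
}


def sample_rate_is_valid(output_format: str, sample_rate: str) -> bool:
    """Return True if the sample rate is allowed for the chosen output format."""
    return sample_rate in SUPPORTED_SAMPLE_RATES_MAP.get(output_format, [])


assert sample_rate_is_valid("pcm", "16000")
assert not sample_rate_is_valid("ogg_vorbis", "24000")
```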
@@ -16,11 +16,6 @@ from homeassistant.components.tts import (
|
||||
)
|
||||
from homeassistant.const import ATTR_CREDENTIALS, CONF_PROFILE_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.generated.amazon_polly import (
|
||||
SUPPORTED_ENGINES,
|
||||
SUPPORTED_REGIONS,
|
||||
SUPPORTED_VOICES,
|
||||
)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
@@ -43,10 +38,13 @@ from .const import (
|
||||
DEFAULT_SAMPLE_RATES,
|
||||
DEFAULT_TEXT_TYPE,
|
||||
DEFAULT_VOICE,
|
||||
SUPPORTED_ENGINES,
|
||||
SUPPORTED_OUTPUT_FORMATS,
|
||||
SUPPORTED_REGIONS,
|
||||
SUPPORTED_SAMPLE_RATES,
|
||||
SUPPORTED_SAMPLE_RATES_MAP,
|
||||
SUPPORTED_TEXT_TYPES,
|
||||
SUPPORTED_VOICES,
|
||||
)
|
||||
|
||||
_LOGGER: Final = logging.getLogger(__name__)
|
||||
|
@@ -17,6 +17,7 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
import homeassistant.helpers.device_registry as dr
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
import homeassistant.helpers.entity_registry as er
|
||||
@@ -24,6 +25,7 @@ import homeassistant.helpers.entity_registry as er
|
||||
from .const import (
|
||||
ATTR_LAST_DATA,
|
||||
CONF_APP_KEY,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
TYPE_SOLARRADIATION,
|
||||
TYPE_SOLARRADIATION_LX,
|
||||
@@ -35,6 +37,7 @@ DATA_CONFIG = "config"
|
||||
|
||||
DEFAULT_SOCKET_MIN_RETRY = 15
|
||||
|
||||
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
|
||||
|
||||
type AmbientStationConfigEntry = ConfigEntry[AmbientStation]
|
||||
|
||||
|
@@ -2,8 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from aioambient import API
|
||||
from aioambient.errors import AmbientError
|
||||
import voluptuous as vol
|
||||
@@ -34,9 +32,7 @@ class AmbientStationFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors if errors else {},
|
||||
)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
async def async_step_user(self, user_input: dict | None = None) -> ConfigFlowResult:
|
||||
"""Handle the start of the config flow."""
|
||||
if not user_input:
|
||||
return await self._show_form()
|
||||
|
@@ -499,7 +499,7 @@ class AmcrestCam(Camera):
|
||||
await getattr(self, f"_async_set_{func}")(value)
|
||||
new_value = await getattr(self, f"_async_get_{func}")()
|
||||
if new_value != value:
|
||||
raise AmcrestCommandFailed # noqa: TRY301
|
||||
raise AmcrestCommandFailed
|
||||
except (AmcrestError, AmcrestCommandFailed) as error:
|
||||
if tries == 1:
|
||||
log_update_error(_LOGGER, action, self.name, description, error)
|
||||
|
@@ -14,6 +14,7 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AndroidIPCamDataUpdateCoordinator
|
||||
@@ -26,6 +27,9 @@ PLATFORMS: list[Platform] = [
|
||||
]
|
||||
|
||||
|
||||
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Android IP Webcam from a config entry."""
|
||||
websession = async_get_clientsession(hass)
|
||||
|
@@ -87,7 +87,7 @@ async def async_setup_entry(
|
||||
"adb_command",
|
||||
)
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_LEARN_SENDEVENT, None, "learn_sendevent"
|
||||
SERVICE_LEARN_SENDEVENT, {}, "learn_sendevent"
|
||||
)
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_DOWNLOAD,
|
||||
|
@@ -1,46 +0,0 @@
|
||||
"""The Anthropic integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import anthropic
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from .const import DOMAIN, LOGGER
|
||||
|
||||
PLATFORMS = (Platform.CONVERSATION,)
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
|
||||
"""Set up Anthropic from a config entry."""
|
||||
client = anthropic.AsyncAnthropic(api_key=entry.data[CONF_API_KEY])
|
||||
try:
|
||||
await client.messages.create(
|
||||
model="claude-3-haiku-20240307",
|
||||
max_tokens=1,
|
||||
messages=[{"role": "user", "content": "Hi"}],
|
||||
timeout=10.0,
|
||||
)
|
||||
except anthropic.AuthenticationError as err:
|
||||
LOGGER.error("Invalid API key: %s", err)
|
||||
return False
|
||||
except anthropic.AnthropicError as err:
|
||||
raise ConfigEntryNotReady(err) from err
|
||||
|
||||
entry.runtime_data = client
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload Anthropic."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
@@ -1,210 +0,0 @@
|
||||
"""Config flow for Anthropic integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from types import MappingProxyType
|
||||
from typing import Any
|
||||
|
||||
import anthropic
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers.selector import (
|
||||
NumberSelector,
|
||||
NumberSelectorConfig,
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
TemplateSelector,
|
||||
)
|
||||
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_PROMPT,
|
||||
CONF_RECOMMENDED,
|
||||
CONF_TEMPERATURE,
|
||||
DOMAIN,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
}
|
||||
)
|
||||
|
||||
RECOMMENDED_OPTIONS = {
|
||||
CONF_RECOMMENDED: True,
|
||||
CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
|
||||
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
|
||||
}
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
|
||||
"""Validate the user input allows us to connect.
|
||||
|
||||
Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
|
||||
"""
|
||||
client = anthropic.AsyncAnthropic(api_key=data[CONF_API_KEY])
|
||||
await client.messages.create(
|
||||
model="claude-3-haiku-20240307",
|
||||
max_tokens=1,
|
||||
messages=[{"role": "user", "content": "Hi"}],
|
||||
timeout=10.0,
|
||||
)
|
||||
|
||||
|
||||
class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Anthropic."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors = {}
|
||||
|
||||
if user_input is not None:
|
||||
try:
|
||||
await validate_input(self.hass, user_input)
|
||||
except anthropic.APITimeoutError:
|
||||
errors["base"] = "timeout_connect"
|
||||
except anthropic.APIConnectionError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except anthropic.APIStatusError as e:
|
||||
if isinstance(e.body, dict):
|
||||
errors["base"] = e.body.get("error", {}).get("type", "unknown")
|
||||
else:
|
||||
errors["base"] = "unknown"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
return self.async_create_entry(
|
||||
title="Claude",
|
||||
data=user_input,
|
||||
options=RECOMMENDED_OPTIONS,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors or None
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlow:
|
||||
"""Create the options flow."""
|
||||
return AnthropicOptionsFlow(config_entry)
|
||||
|
||||
|
||||
class AnthropicOptionsFlow(OptionsFlow):
|
||||
"""Anthropic config flow options handler."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.config_entry = config_entry
|
||||
self.last_rendered_recommended = config_entry.options.get(
|
||||
CONF_RECOMMENDED, False
|
||||
)
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage the options."""
|
||||
options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options
|
||||
|
||||
if user_input is not None:
|
||||
if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
|
||||
if user_input[CONF_LLM_HASS_API] == "none":
|
||||
user_input.pop(CONF_LLM_HASS_API)
|
||||
return self.async_create_entry(title="", data=user_input)
|
||||
|
||||
# Re-render the options again, now with the recommended options shown/hidden
|
||||
self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
|
||||
|
||||
options = {
|
||||
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
|
||||
CONF_PROMPT: user_input[CONF_PROMPT],
|
||||
CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
|
||||
}
|
||||
|
||||
suggested_values = options.copy()
|
||||
if not suggested_values.get(CONF_PROMPT):
|
||||
suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT
|
||||
|
||||
schema = self.add_suggested_values_to_schema(
|
||||
vol.Schema(anthropic_config_option_schema(self.hass, options)),
|
||||
suggested_values,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="init",
|
||||
data_schema=schema,
|
||||
)
|
||||
|
||||
|
||||
def anthropic_config_option_schema(
|
||||
hass: HomeAssistant,
|
||||
options: dict[str, Any] | MappingProxyType[str, Any],
|
||||
) -> dict:
|
||||
"""Return a schema for Anthropic completion options."""
|
||||
hass_apis: list[SelectOptionDict] = [
|
||||
SelectOptionDict(
|
||||
label="No control",
|
||||
value="none",
|
||||
)
|
||||
]
|
||||
hass_apis.extend(
|
||||
SelectOptionDict(
|
||||
label=api.name,
|
||||
value=api.id,
|
||||
)
|
||||
for api in llm.async_get_apis(hass)
|
||||
)
|
||||
|
||||
schema = {
|
||||
vol.Optional(CONF_PROMPT): TemplateSelector(),
|
||||
vol.Optional(CONF_LLM_HASS_API, default="none"): SelectSelector(
|
||||
SelectSelectorConfig(options=hass_apis)
|
||||
),
|
||||
vol.Required(
|
||||
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
|
||||
): bool,
|
||||
}
|
||||
|
||||
if options.get(CONF_RECOMMENDED):
|
||||
return schema
|
||||
|
||||
schema.update(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_CHAT_MODEL,
|
||||
default=RECOMMENDED_CHAT_MODEL,
|
||||
): str,
|
||||
vol.Optional(
|
||||
CONF_MAX_TOKENS,
|
||||
default=RECOMMENDED_MAX_TOKENS,
|
||||
): int,
|
||||
vol.Optional(
|
||||
CONF_TEMPERATURE,
|
||||
default=RECOMMENDED_TEMPERATURE,
|
||||
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
|
||||
}
|
||||
)
|
||||
return schema
|
@@ -1,15 +0,0 @@
|
||||
"""Constants for the Anthropic integration."""
|
||||
|
||||
import logging
|
||||
|
||||
DOMAIN = "anthropic"
|
||||
LOGGER = logging.getLogger(__package__)
|
||||
|
||||
CONF_RECOMMENDED = "recommended"
|
||||
CONF_PROMPT = "prompt"
|
||||
CONF_CHAT_MODEL = "chat_model"
|
||||
RECOMMENDED_CHAT_MODEL = "claude-3-5-sonnet-20240620"
|
||||
CONF_MAX_TOKENS = "max_tokens"
|
||||
RECOMMENDED_MAX_TOKENS = 1024
|
||||
CONF_TEMPERATURE = "temperature"
|
||||
RECOMMENDED_TEMPERATURE = 1.0
|
@@ -1,316 +0,0 @@
"""Conversation support for Anthropic."""

from collections.abc import Callable
import json
from typing import Any, Literal, cast

import anthropic
from anthropic._types import NOT_GIVEN
from anthropic.types import (
    Message,
    MessageParam,
    TextBlock,
    TextBlockParam,
    ToolParam,
    ToolResultBlockParam,
    ToolUseBlock,
    ToolUseBlockParam,
)
import voluptuous as vol
from voluptuous_openapi import convert

from homeassistant.components import conversation
from homeassistant.components.conversation import trace
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.helpers import device_registry as dr, intent, llm, template
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import ulid

from . import AnthropicConfigEntry
from .const import (
    CONF_CHAT_MODEL,
    CONF_MAX_TOKENS,
    CONF_PROMPT,
    CONF_TEMPERATURE,
    DOMAIN,
    LOGGER,
    RECOMMENDED_CHAT_MODEL,
    RECOMMENDED_MAX_TOKENS,
    RECOMMENDED_TEMPERATURE,
)

# Max number of back and forth with the LLM to generate a response
MAX_TOOL_ITERATIONS = 10


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: AnthropicConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up conversation entities."""
    agent = AnthropicConversationEntity(config_entry)
    async_add_entities([agent])


def _format_tool(
    tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
) -> ToolParam:
    """Format tool specification."""
    return ToolParam(
        name=tool.name,
        description=tool.description or "",
        input_schema=convert(tool.parameters, custom_serializer=custom_serializer),
    )


def _message_convert(
    message: Message,
) -> MessageParam:
    """Convert from class to TypedDict."""
    param_content: list[TextBlockParam | ToolUseBlockParam] = []

    for message_content in message.content:
        if isinstance(message_content, TextBlock):
            param_content.append(TextBlockParam(type="text", text=message_content.text))
        elif isinstance(message_content, ToolUseBlock):
            param_content.append(
                ToolUseBlockParam(
                    type="tool_use",
                    id=message_content.id,
                    name=message_content.name,
                    input=message_content.input,
                )
            )

    return MessageParam(role=message.role, content=param_content)


class AnthropicConversationEntity(
    conversation.ConversationEntity, conversation.AbstractConversationAgent
):
    """Anthropic conversation agent."""

    _attr_has_entity_name = True
    _attr_name = None

    def __init__(self, entry: AnthropicConfigEntry) -> None:
        """Initialize the agent."""
        self.entry = entry
        self.history: dict[str, list[MessageParam]] = {}
        self._attr_unique_id = entry.entry_id
        self._attr_device_info = dr.DeviceInfo(
            identifiers={(DOMAIN, entry.entry_id)},
            manufacturer="Anthropic",
            model="Claude",
            entry_type=dr.DeviceEntryType.SERVICE,
        )
        if self.entry.options.get(CONF_LLM_HASS_API):
            self._attr_supported_features = (
                conversation.ConversationEntityFeature.CONTROL
            )

    @property
    def supported_languages(self) -> list[str] | Literal["*"]:
        """Return a list of supported languages."""
        return MATCH_ALL

    async def async_added_to_hass(self) -> None:
        """When entity is added to Home Assistant."""
        await super().async_added_to_hass()
        self.entry.async_on_unload(
            self.entry.add_update_listener(self._async_entry_update_listener)
        )

    async def async_process(
        self, user_input: conversation.ConversationInput
    ) -> conversation.ConversationResult:
        """Process a sentence."""
        options = self.entry.options
        intent_response = intent.IntentResponse(language=user_input.language)
        llm_api: llm.APIInstance | None = None
        tools: list[ToolParam] | None = None
        user_name: str | None = None
        llm_context = llm.LLMContext(
            platform=DOMAIN,
            context=user_input.context,
            user_prompt=user_input.text,
            language=user_input.language,
            assistant=conversation.DOMAIN,
            device_id=user_input.device_id,
        )

        if options.get(CONF_LLM_HASS_API):
            try:
                llm_api = await llm.async_get_api(
                    self.hass,
                    options[CONF_LLM_HASS_API],
                    llm_context,
                )
            except HomeAssistantError as err:
                LOGGER.error("Error getting LLM API: %s", err)
                intent_response.async_set_error(
                    intent.IntentResponseErrorCode.UNKNOWN,
                    f"Error preparing LLM API: {err}",
                )
                return conversation.ConversationResult(
                    response=intent_response, conversation_id=user_input.conversation_id
                )
            tools = [
                _format_tool(tool, llm_api.custom_serializer) for tool in llm_api.tools
            ]

        if user_input.conversation_id is None:
            conversation_id = ulid.ulid_now()
            messages = []

        elif user_input.conversation_id in self.history:
            conversation_id = user_input.conversation_id
            messages = self.history[conversation_id]

        else:
            # Conversation IDs are ULIDs. We generate a new one if not provided.
            # If an old ULID is passed in, we will generate a new one to indicate
            # a new conversation was started. If the user picks their own, they
            # want to track a conversation and we respect it.
            try:
                ulid.ulid_to_bytes(user_input.conversation_id)
                conversation_id = ulid.ulid_now()
            except ValueError:
                conversation_id = user_input.conversation_id

            messages = []

        if (
            user_input.context
            and user_input.context.user_id
            and (
                user := await self.hass.auth.async_get_user(user_input.context.user_id)
            )
        ):
            user_name = user.name

        try:
            prompt_parts = [
                template.Template(
                    llm.BASE_PROMPT
                    + options.get(CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT),
                    self.hass,
                ).async_render(
                    {
                        "ha_name": self.hass.config.location_name,
                        "user_name": user_name,
                        "llm_context": llm_context,
                    },
                    parse_result=False,
                )
            ]

        except TemplateError as err:
            LOGGER.error("Error rendering prompt: %s", err)
            intent_response.async_set_error(
                intent.IntentResponseErrorCode.UNKNOWN,
                f"Sorry, I had a problem with my template: {err}",
            )
            return conversation.ConversationResult(
                response=intent_response, conversation_id=conversation_id
            )

        if llm_api:
            prompt_parts.append(llm_api.api_prompt)

        prompt = "\n".join(prompt_parts)

        # Create a copy of the variable because we attach it to the trace
        messages = [*messages, MessageParam(role="user", content=user_input.text)]

        LOGGER.debug("Prompt: %s", messages)
        LOGGER.debug("Tools: %s", tools)
        trace.async_conversation_trace_append(
            trace.ConversationTraceEventType.AGENT_DETAIL,
            {"system": prompt, "messages": messages},
        )

        client = self.entry.runtime_data

        # To prevent infinite loops, we limit the number of iterations
        for _iteration in range(MAX_TOOL_ITERATIONS):
            try:
                response = await client.messages.create(
                    model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
                    messages=messages,
                    tools=tools or NOT_GIVEN,
                    max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
                    system=prompt,
                    temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
                )
            except anthropic.AnthropicError as err:
                intent_response.async_set_error(
                    intent.IntentResponseErrorCode.UNKNOWN,
                    f"Sorry, I had a problem talking to Anthropic: {err}",
                )
                return conversation.ConversationResult(
                    response=intent_response, conversation_id=conversation_id
                )

            LOGGER.debug("Response %s", response)

            messages.append(_message_convert(response))

            if response.stop_reason != "tool_use" or not llm_api:
                break

            tool_results: list[ToolResultBlockParam] = []
            for tool_call in response.content:
                if isinstance(tool_call, TextBlock):
                    LOGGER.info(tool_call.text)

                if not isinstance(tool_call, ToolUseBlock):
                    continue

                tool_input = llm.ToolInput(
                    tool_name=tool_call.name,
                    tool_args=cast(dict[str, Any], tool_call.input),
                )
                LOGGER.debug(
                    "Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args
                )

                try:
                    tool_response = await llm_api.async_call_tool(tool_input)
                except (HomeAssistantError, vol.Invalid) as e:
                    tool_response = {"error": type(e).__name__}
                    if str(e):
                        tool_response["error_text"] = str(e)

                LOGGER.debug("Tool response: %s", tool_response)
                tool_results.append(
                    ToolResultBlockParam(
                        type="tool_result",
                        tool_use_id=tool_call.id,
                        content=json.dumps(tool_response),
                    )
                )

            messages.append(MessageParam(role="user", content=tool_results))

        self.history[conversation_id] = messages

        for content in response.content:
            if isinstance(content, TextBlock):
                intent_response.async_set_speech(content.text)
                break

        return conversation.ConversationResult(
            response=intent_response, conversation_id=conversation_id
        )

    async def _async_entry_update_listener(
        self, hass: HomeAssistant, entry: ConfigEntry
    ) -> None:
        """Handle options update."""
        # Reload as we update device info + entity name + supported features
        await hass.config_entries.async_reload(entry.entry_id)
@@ -1,12 +0,0 @@
{
  "domain": "anthropic",
  "name": "Anthropic Conversation",
  "after_dependencies": ["assist_pipeline", "intent"],
  "codeowners": ["@Shulyaka"],
  "config_flow": true,
  "dependencies": ["conversation"],
  "documentation": "https://www.home-assistant.io/integrations/anthropic",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "requirements": ["anthropic==0.31.2"]
}
@@ -1,34 +0,0 @@
{
  "config": {
    "step": {
      "user": {
        "data": {
          "api_key": "[%key:common::config_flow::data::api_key%]"
        }
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
      "authentication_error": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    }
  },
  "options": {
    "step": {
      "init": {
        "data": {
          "prompt": "Instructions",
          "chat_model": "[%key:common::generic::model%]",
          "max_tokens": "Maximum tokens to return in response",
          "temperature": "Temperature",
          "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
          "recommended": "Recommended model settings"
        },
        "data_description": {
          "prompt": "Instruct how the LLM should respond. This can be a template."
        }
      }
    }
  }
}
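Taken together, the conversation entity above reduces to a single request/response loop against the Anthropic SDK. A condensed sketch (the `client` is whatever async Anthropic client the integration stores in `entry.runtime_data`; option handling mirrors the code above):

    from anthropic._types import NOT_GIVEN

    response = await client.messages.create(
        model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL),
        messages=messages,  # list[MessageParam], latest user turn last
        tools=tools or NOT_GIVEN,  # ToolParam specs built from the Home Assistant LLM API
        max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
        system=prompt,  # rendered instructions template
        temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
    )
    if response.stop_reason == "tool_use":
        # Append ToolResultBlockParam entries as a "user" message and call again,
        # up to MAX_TOOL_ITERATIONS times.
        ...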
@@ -8,6 +8,7 @@ from typing import Final
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PORT, Platform
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv

from .const import DOMAIN
from .coordinator import APCUPSdCoordinator
@@ -16,6 +17,8 @@ _LOGGER = logging.getLogger(__name__)

PLATFORMS: Final = (Platform.BINARY_SENSOR, Platform.SENSOR)

CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)


async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Use config values to set up a function enabling status retrieval."""
@@ -7,7 +7,7 @@
  "documentation": "https://www.home-assistant.io/integrations/apple_tv",
  "iot_class": "local_push",
  "loggers": ["pyatv", "srptools"],
  "requirements": ["pyatv==0.15.0"],
  "requirements": ["pyatv==0.14.3"],
  "zeroconf": [
    "_mediaremotetv._tcp.local.",
    "_companion-link._tcp.local.",
@@ -13,12 +13,7 @@ from homeassistant.core import HomeAssistant
from .const import DEFAULT_PORT
from .coordinator import ApSystemsDataCoordinator

PLATFORMS: list[Platform] = [
    Platform.BINARY_SENSOR,
    Platform.NUMBER,
    Platform.SENSOR,
    Platform.SWITCH,
]
PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR, Platform.SWITCH]


@dataclass
@@ -1,102 +0,0 @@
|
||||
"""The read-only binary sensors for APsystems local API integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from APsystemsEZ1 import ReturnAlarmInfo
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import ApSystemsConfigEntry, ApSystemsData
|
||||
from .coordinator import ApSystemsDataCoordinator
|
||||
from .entity import ApSystemsEntity
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class ApsystemsLocalApiBinarySensorDescription(BinarySensorEntityDescription):
|
||||
"""Describes Apsystens Inverter binary sensor entity."""
|
||||
|
||||
is_on: Callable[[ReturnAlarmInfo], bool | None]
|
||||
|
||||
|
||||
BINARY_SENSORS: tuple[ApsystemsLocalApiBinarySensorDescription, ...] = (
|
||||
ApsystemsLocalApiBinarySensorDescription(
|
||||
key="off_grid_status",
|
||||
translation_key="off_grid_status",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
is_on=lambda c: c.offgrid,
|
||||
),
|
||||
ApsystemsLocalApiBinarySensorDescription(
|
||||
key="dc_1_short_circuit_error_status",
|
||||
translation_key="dc_1_short_circuit_error_status",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
is_on=lambda c: c.shortcircuit_1,
|
||||
),
|
||||
ApsystemsLocalApiBinarySensorDescription(
|
||||
key="dc_2_short_circuit_error_status",
|
||||
translation_key="dc_2_short_circuit_error_status",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
is_on=lambda c: c.shortcircuit_2,
|
||||
),
|
||||
ApsystemsLocalApiBinarySensorDescription(
|
||||
key="output_fault_status",
|
||||
translation_key="output_fault_status",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
is_on=lambda c: not c.operating,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ApSystemsConfigEntry,
|
||||
add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the binary sensor platform."""
|
||||
config = config_entry.runtime_data
|
||||
|
||||
add_entities(
|
||||
ApSystemsBinarySensorWithDescription(
|
||||
data=config,
|
||||
entity_description=desc,
|
||||
)
|
||||
for desc in BINARY_SENSORS
|
||||
)
|
||||
|
||||
|
||||
class ApSystemsBinarySensorWithDescription(
|
||||
CoordinatorEntity[ApSystemsDataCoordinator], ApSystemsEntity, BinarySensorEntity
|
||||
):
|
||||
"""Base binary sensor to be used with description."""
|
||||
|
||||
entity_description: ApsystemsLocalApiBinarySensorDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
data: ApSystemsData,
|
||||
entity_description: ApsystemsLocalApiBinarySensorDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(data.coordinator)
|
||||
ApSystemsEntity.__init__(self, data)
|
||||
self.entity_description = entity_description
|
||||
self._attr_unique_id = f"{data.device_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return value of sensor."""
|
||||
return self.entity_description.is_on(self.coordinator.data.alarm_info)
|
@@ -2,26 +2,17 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
|
||||
from APsystemsEZ1 import APsystemsEZ1M, ReturnAlarmInfo, ReturnOutputData
|
||||
from APsystemsEZ1 import APsystemsEZ1M, ReturnOutputData
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .const import LOGGER
|
||||
|
||||
|
||||
@dataclass
|
||||
class ApSystemsSensorData:
|
||||
"""Representing different Apsystems sensor data."""
|
||||
|
||||
output_data: ReturnOutputData
|
||||
alarm_info: ReturnAlarmInfo
|
||||
|
||||
|
||||
class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
|
||||
class ApSystemsDataCoordinator(DataUpdateCoordinator[ReturnOutputData]):
|
||||
"""Coordinator used for all sensors."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, api: APsystemsEZ1M) -> None:
|
||||
@@ -34,14 +25,5 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
|
||||
)
|
||||
self.api = api
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
try:
|
||||
max_power = (await self.api.get_device_info()).maxPower
|
||||
except (ConnectionError, TimeoutError):
|
||||
raise UpdateFailed from None
|
||||
self.api.max_power = max_power
|
||||
|
||||
async def _async_update_data(self) -> ApSystemsSensorData:
|
||||
output_data = await self.api.get_output_data()
|
||||
alarm_info = await self.api.get_alarm_info()
|
||||
return ApSystemsSensorData(output_data=output_data, alarm_info=alarm_info)
|
||||
async def _async_update_data(self) -> ReturnOutputData:
|
||||
return await self.api.get_output_data()
|
||||
|
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/apsystems",
  "integration_type": "device",
  "iot_class": "local_polling",
  "requirements": ["apsystems-ez1==2.2.1"]
  "requirements": ["apsystems-ez1==1.3.3"]
}
@@ -26,6 +26,7 @@ async def async_setup_entry(
|
||||
class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity):
|
||||
"""Base sensor to be used with description."""
|
||||
|
||||
_attr_native_max_value = 800
|
||||
_attr_native_min_value = 30
|
||||
_attr_native_step = 1
|
||||
_attr_device_class = NumberDeviceClass.POWER
|
||||
@@ -41,7 +42,6 @@ class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity):
|
||||
super().__init__(data)
|
||||
self._api = data.coordinator.api
|
||||
self._attr_unique_id = f"{data.device_id}_output_limit"
|
||||
self._attr_native_max_value = data.coordinator.api.max_power
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Set the state with the value fetched from the inverter."""
|
||||
|
@@ -148,4 +148,4 @@ class ApSystemsSensorWithDescription(
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return value of sensor."""
|
||||
return self.entity_description.value_fn(self.coordinator.data.output_data)
|
||||
return self.entity_description.value_fn(self.coordinator.data)
|
||||
|
@@ -19,20 +19,6 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"off_grid_status": {
|
||||
"name": "Off grid status"
|
||||
},
|
||||
"dc_1_short_circuit_error_status": {
|
||||
"name": "DC 1 short circuit error status"
|
||||
},
|
||||
"dc_2_short_circuit_error_status": {
|
||||
"name": "DC 2 short circuit error status"
|
||||
},
|
||||
"output_fault_status": {
|
||||
"name": "Output fault status"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"total_power": {
|
||||
"name": "Total power"
|
||||
|
@@ -5,6 +5,7 @@ from __future__ import annotations
|
||||
from typing import Any
|
||||
|
||||
from aiohttp.client_exceptions import ClientConnectionError
|
||||
from APsystemsEZ1 import Status
|
||||
|
||||
from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -44,12 +45,12 @@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity):
|
||||
self._attr_available = False
|
||||
else:
|
||||
self._attr_available = True
|
||||
self._attr_is_on = status
|
||||
self._attr_is_on = status == Status.normal
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch on."""
|
||||
await self._api.set_device_power_status(True)
|
||||
await self._api.set_device_power_status(0)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch off."""
|
||||
await self._api.set_device_power_status(False)
|
||||
await self._api.set_device_power_status(1)
|
||||
|
@@ -3,14 +3,12 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from aioaquacell import AquacellApi
|
||||
from aioaquacell.const import Brand
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import CONF_BRAND
|
||||
from .coordinator import AquacellCoordinator
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
@@ -22,9 +20,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AquacellConfigEntry) ->
|
||||
"""Set up Aquacell from a config entry."""
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
brand = entry.data.get(CONF_BRAND, Brand.AQUACELL)
|
||||
|
||||
aquacell_api = AquacellApi(session, brand)
|
||||
aquacell_api = AquacellApi(session)
|
||||
|
||||
coordinator = AquacellCoordinator(hass, aquacell_api)
|
||||
|
||||
|
@@ -7,27 +7,18 @@ import logging
|
||||
from typing import Any
|
||||
|
||||
from aioaquacell import ApiException, AquacellApi, AuthenticationFailed
|
||||
from aioaquacell.const import SUPPORTED_BRANDS, Brand
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import (
|
||||
CONF_BRAND,
|
||||
CONF_REFRESH_TOKEN,
|
||||
CONF_REFRESH_TOKEN_CREATION_TIME,
|
||||
DOMAIN,
|
||||
)
|
||||
from .const import CONF_REFRESH_TOKEN, CONF_REFRESH_TOKEN_CREATION_TIME, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_BRAND, default=Brand.AQUACELL): vol.In(
|
||||
{key: brand.name for key, brand in SUPPORTED_BRANDS.items()}
|
||||
),
|
||||
vol.Required(CONF_EMAIL): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
@@ -42,7 +33,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the cloud logon step."""
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
await self.async_set_unique_id(
|
||||
@@ -51,7 +42,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
session = async_get_clientsession(self.hass)
|
||||
api = AquacellApi(session, user_input[CONF_BRAND])
|
||||
api = AquacellApi(session)
|
||||
try:
|
||||
refresh_token = await api.authenticate(
|
||||
user_input[CONF_EMAIL], user_input[CONF_PASSWORD]
|
||||
@@ -68,7 +59,6 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
title=user_input[CONF_EMAIL],
|
||||
data={
|
||||
**user_input,
|
||||
CONF_BRAND: user_input[CONF_BRAND],
|
||||
CONF_REFRESH_TOKEN: refresh_token,
|
||||
CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(),
|
||||
},
|
||||
|
@@ -5,7 +5,6 @@ from datetime import timedelta
|
||||
DOMAIN = "aquacell"
|
||||
DATA_AQUACELL = "DATA_AQUACELL"
|
||||
|
||||
CONF_BRAND = "brand"
|
||||
CONF_REFRESH_TOKEN = "refresh_token"
|
||||
CONF_REFRESH_TOKEN_CREATION_TIME = "refresh_token_creation_time"
|
||||
|
||||
|
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"domain": "aquacell",
|
||||
"name": "AquaCell",
|
||||
"name": "Aquacell",
|
||||
"codeowners": ["@Jordi1990"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["http", "network"],
|
||||
|
@@ -2,9 +2,8 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Select the brand of the softener and fill in your softener mobile app credentials",
|
||||
"description": "Fill in your Aquacell mobile app credentials",
|
||||
"data": {
|
||||
"brand": "Brand",
|
||||
"email": "[%key:common::config_flow::data::email%]",
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
}
|
||||
|
@@ -6,9 +6,6 @@
|
||||
},
|
||||
"radiation_rate": {
|
||||
"default": "mdi:radioactive"
|
||||
},
|
||||
"radon_concentration": {
|
||||
"default": "mdi:radioactive"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -19,5 +19,5 @@
  "documentation": "https://www.home-assistant.io/integrations/aranet",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["aranet4==2.4.0"]
  "requirements": ["aranet4==2.3.4"]
}
|
||||
|
@@ -99,13 +99,6 @@ SENSOR_DESCRIPTIONS = {
|
||||
suggested_display_precision=4,
|
||||
scale=0.000001,
|
||||
),
|
||||
"radon_concentration": AranetSensorEntityDescription(
|
||||
key="radon_concentration",
|
||||
translation_key="radon_concentration",
|
||||
name="Radon Concentration",
|
||||
native_unit_of_measurement="Bq/m³",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
"battery": AranetSensorEntityDescription(
|
||||
key="battery",
|
||||
name="Battery",
|
||||
|
@@ -11,10 +11,12 @@ from arcam.fmj.client import Client
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
|
||||
from .const import (
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
DOMAIN,
|
||||
SIGNAL_CLIENT_DATA,
|
||||
SIGNAL_CLIENT_STARTED,
|
||||
SIGNAL_CLIENT_STOPPED,
|
||||
@@ -24,6 +26,7 @@ type ArcamFmjConfigEntry = ConfigEntry[Client]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
|
||||
|
||||
PLATFORMS = [Platform.MEDIA_PLAYER]
|
||||
|
||||
|
@@ -87,6 +87,8 @@ def setup_platform(
|
||||
if value_template is None:
|
||||
return lambda value: value
|
||||
|
||||
value_template.hass = hass
|
||||
|
||||
def _render(value):
|
||||
try:
|
||||
return value_template.async_render({"value": value}, parse_result=False)
|
||||
|
@@ -1 +0,0 @@
"""Virtual integration: ArtSound."""
@@ -1,6 +0,0 @@
{
  "domain": "artsound",
  "name": "ArtSound",
  "integration_type": "virtual",
  "supported_by": "linkplay"
}
|
@@ -6,11 +6,13 @@ from collections.abc import Callable, Iterable
|
||||
from dataclasses import dataclass
|
||||
from enum import StrEnum
|
||||
import logging
|
||||
|
||||
from .const import SAMPLE_CHANNELS, SAMPLE_RATE, SAMPLE_WIDTH
|
||||
from typing import Final
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_SAMPLE_RATE: Final = 16000 # Hz
|
||||
_SAMPLE_WIDTH: Final = 2 # bytes
|
||||
|
||||
|
||||
class VadSensitivity(StrEnum):
|
||||
"""How quickly the end of a voice command is detected."""
|
||||
@@ -24,12 +26,12 @@ class VadSensitivity(StrEnum):
|
||||
"""Return seconds of silence for sensitivity level."""
|
||||
sensitivity = VadSensitivity(sensitivity)
|
||||
if sensitivity == VadSensitivity.RELAXED:
|
||||
return 1.25
|
||||
return 2.0
|
||||
|
||||
if sensitivity == VadSensitivity.AGGRESSIVE:
|
||||
return 0.25
|
||||
return 0.5
|
||||
|
||||
return 0.7
|
||||
return 1.0
|
||||
|
||||
|
||||
class AudioBuffer:
|
||||
@@ -78,7 +80,7 @@ class VoiceCommandSegmenter:
|
||||
speech_seconds: float = 0.3
|
||||
"""Seconds of speech before voice command has started."""
|
||||
|
||||
silence_seconds: float = 0.7
|
||||
silence_seconds: float = 1.0
|
||||
"""Seconds of silence after voice command has ended."""
|
||||
|
||||
timeout_seconds: float = 15.0
|
||||
@@ -90,9 +92,6 @@ class VoiceCommandSegmenter:
|
||||
in_command: bool = False
|
||||
"""True if inside voice command."""
|
||||
|
||||
timed_out: bool = False
|
||||
"""True a timeout occurred during voice command."""
|
||||
|
||||
_speech_seconds_left: float = 0.0
|
||||
"""Seconds left before considering voice command as started."""
|
||||
|
||||
@@ -122,9 +121,6 @@ class VoiceCommandSegmenter:
|
||||
|
||||
Returns False when command is done.
|
||||
"""
|
||||
if self.timed_out:
|
||||
self.timed_out = False
|
||||
|
||||
self._timeout_seconds_left -= chunk_seconds
|
||||
if self._timeout_seconds_left <= 0:
|
||||
_LOGGER.warning(
|
||||
@@ -132,7 +128,6 @@ class VoiceCommandSegmenter:
|
||||
self.timeout_seconds,
|
||||
)
|
||||
self.reset()
|
||||
self.timed_out = True
|
||||
return False
|
||||
|
||||
if not self.in_command:
|
||||
@@ -184,9 +179,7 @@ class VoiceCommandSegmenter:
|
||||
"""
|
||||
if vad_samples_per_chunk is None:
|
||||
# No chunking
|
||||
chunk_seconds = (
|
||||
len(chunk) // (SAMPLE_WIDTH * SAMPLE_CHANNELS)
|
||||
) / SAMPLE_RATE
|
||||
chunk_seconds = (len(chunk) // _SAMPLE_WIDTH) / _SAMPLE_RATE
|
||||
is_speech = vad_is_speech(chunk)
|
||||
return self.process(chunk_seconds, is_speech)
|
||||
|
||||
@@ -194,8 +187,8 @@ class VoiceCommandSegmenter:
|
||||
raise ValueError("leftover_chunk_buffer is required when vad uses chunking")
|
||||
|
||||
# With chunking
|
||||
seconds_per_chunk = vad_samples_per_chunk / SAMPLE_RATE
|
||||
bytes_per_chunk = vad_samples_per_chunk * (SAMPLE_WIDTH * SAMPLE_CHANNELS)
|
||||
seconds_per_chunk = vad_samples_per_chunk / _SAMPLE_RATE
|
||||
bytes_per_chunk = vad_samples_per_chunk * _SAMPLE_WIDTH
|
||||
for vad_chunk in chunk_samples(chunk, bytes_per_chunk, leftover_chunk_buffer):
|
||||
is_speech = vad_is_speech(vad_chunk)
|
||||
if not self.process(seconds_per_chunk, is_speech):
|
||||
|
homeassistant/components/asterisk_cdr/__init__.py (new file)
@@ -0,0 +1 @@
"""The asterisk_cdr component."""
homeassistant/components/asterisk_cdr/mailbox.py (new file)
@@ -0,0 +1,70 @@
|
||||
"""Support for the Asterisk CDR interface."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import datetime
|
||||
import hashlib
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.asterisk_mbox import (
|
||||
DOMAIN as ASTERISK_DOMAIN,
|
||||
SIGNAL_CDR_UPDATE,
|
||||
)
|
||||
from homeassistant.components.mailbox import Mailbox
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
MAILBOX_NAME = "asterisk_cdr"
|
||||
|
||||
|
||||
async def async_get_handler(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> Mailbox:
|
||||
"""Set up the Asterix CDR platform."""
|
||||
return AsteriskCDR(hass, MAILBOX_NAME)
|
||||
|
||||
|
||||
class AsteriskCDR(Mailbox):
|
||||
"""Asterisk VM Call Data Record mailbox."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, name: str) -> None:
|
||||
"""Initialize Asterisk CDR."""
|
||||
super().__init__(hass, name)
|
||||
self.cdr: list[dict[str, Any]] = []
|
||||
async_dispatcher_connect(self.hass, SIGNAL_CDR_UPDATE, self._update_callback)
|
||||
|
||||
@callback
|
||||
def _update_callback(self, msg: list[dict[str, Any]]) -> Any:
|
||||
"""Update the message count in HA, if needed."""
|
||||
self._build_message()
|
||||
self.async_update()
|
||||
|
||||
def _build_message(self) -> None:
|
||||
"""Build message structure."""
|
||||
cdr: list[dict[str, Any]] = []
|
||||
for entry in self.hass.data[ASTERISK_DOMAIN].cdr:
|
||||
timestamp = datetime.datetime.strptime(
|
||||
entry["time"], "%Y-%m-%d %H:%M:%S"
|
||||
).timestamp()
|
||||
info = {
|
||||
"origtime": timestamp,
|
||||
"callerid": entry["callerid"],
|
||||
"duration": entry["duration"],
|
||||
}
|
||||
sha = hashlib.sha256(str(entry).encode("utf-8")).hexdigest()
|
||||
msg = (
|
||||
f"Destination: {entry['dest']}\n"
|
||||
f"Application: {entry['application']}\n "
|
||||
f"Context: {entry['context']}"
|
||||
)
|
||||
cdr.append({"info": info, "sha": sha, "text": msg})
|
||||
self.cdr = cdr
|
||||
|
||||
async def async_get_messages(self) -> list[dict[str, Any]]:
|
||||
"""Return a list of the current messages."""
|
||||
if not self.cdr:
|
||||
self._build_message()
|
||||
return self.cdr
|
homeassistant/components/asterisk_cdr/manifest.json (new file)
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"domain": "asterisk_cdr",
|
||||
"name": "Asterisk Call Detail Records",
|
||||
"codeowners": [],
|
||||
"dependencies": ["asterisk_mbox"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/asterisk_cdr",
|
||||
"iot_class": "local_polling"
|
||||
}
|
homeassistant/components/asterisk_mbox/__init__.py (new file)
@@ -0,0 +1,153 @@
|
||||
"""Support for Asterisk Voicemail interface."""
|
||||
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
from asterisk_mbox import Client as asteriskClient
|
||||
from asterisk_mbox.commands import (
|
||||
CMD_MESSAGE_CDR,
|
||||
CMD_MESSAGE_CDR_AVAILABLE,
|
||||
CMD_MESSAGE_LIST,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import discovery
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send, dispatcher_connect
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = "asterisk_mbox"
|
||||
|
||||
SIGNAL_DISCOVER_PLATFORM = "asterisk_mbox.discover_platform"
|
||||
SIGNAL_MESSAGE_REQUEST = "asterisk_mbox.message_request"
|
||||
SIGNAL_MESSAGE_UPDATE = "asterisk_mbox.message_updated"
|
||||
SIGNAL_CDR_UPDATE = "asterisk_mbox.message_updated"
|
||||
SIGNAL_CDR_REQUEST = "asterisk_mbox.message_request"
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Required(CONF_PORT): cv.port,
|
||||
}
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up for the Asterisk Voicemail box."""
|
||||
conf: dict[str, Any] = config[DOMAIN]
|
||||
|
||||
host: str = conf[CONF_HOST]
|
||||
port: int = conf[CONF_PORT]
|
||||
password: str = conf[CONF_PASSWORD]
|
||||
|
||||
hass.data[DOMAIN] = AsteriskData(hass, host, port, password, config)
|
||||
create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_integration",
|
||||
breaks_in_ha_version="2024.9.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_integration",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Asterisk Voicemail",
|
||||
"mailbox": "mailbox",
|
||||
},
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class AsteriskData:
|
||||
"""Store Asterisk mailbox data."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
host: str,
|
||||
port: int,
|
||||
password: str,
|
||||
config: dict[str, Any],
|
||||
) -> None:
|
||||
"""Init the Asterisk data object."""
|
||||
|
||||
self.hass = hass
|
||||
self.config = config
|
||||
self.messages: list[dict[str, Any]] | None = None
|
||||
self.cdr: list[dict[str, Any]] | None = None
|
||||
|
||||
dispatcher_connect(self.hass, SIGNAL_MESSAGE_REQUEST, self._request_messages)
|
||||
dispatcher_connect(self.hass, SIGNAL_CDR_REQUEST, self._request_cdr)
|
||||
dispatcher_connect(self.hass, SIGNAL_DISCOVER_PLATFORM, self._discover_platform)
|
||||
# Only connect after signal connection to ensure we don't miss any
|
||||
self.client = asteriskClient(host, port, password, self.handle_data)
|
||||
|
||||
@callback
|
||||
def _discover_platform(self, component: str) -> None:
|
||||
_LOGGER.debug("Adding mailbox %s", component)
|
||||
self.hass.async_create_task(
|
||||
discovery.async_load_platform(
|
||||
self.hass, "mailbox", component, {}, self.config
|
||||
)
|
||||
)
|
||||
|
||||
@callback
|
||||
def handle_data(
|
||||
self, command: int, msg: list[dict[str, Any]] | dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle changes to the mailbox."""
|
||||
|
||||
if command == CMD_MESSAGE_LIST:
|
||||
msg = cast(list[dict[str, Any]], msg)
|
||||
_LOGGER.debug("AsteriskVM sent updated message list: Len %d", len(msg))
|
||||
old_messages = self.messages
|
||||
self.messages = sorted(
|
||||
msg, key=lambda item: item["info"]["origtime"], reverse=True
|
||||
)
|
||||
if not isinstance(old_messages, list):
|
||||
async_dispatcher_send(self.hass, SIGNAL_DISCOVER_PLATFORM, DOMAIN)
|
||||
async_dispatcher_send(self.hass, SIGNAL_MESSAGE_UPDATE, self.messages)
|
||||
elif command == CMD_MESSAGE_CDR:
|
||||
msg = cast(dict[str, Any], msg)
|
||||
_LOGGER.debug(
|
||||
"AsteriskVM sent updated CDR list: Len %d", len(msg.get("entries", []))
|
||||
)
|
||||
self.cdr = msg["entries"]
|
||||
async_dispatcher_send(self.hass, SIGNAL_CDR_UPDATE, self.cdr)
|
||||
elif command == CMD_MESSAGE_CDR_AVAILABLE:
|
||||
if not isinstance(self.cdr, list):
|
||||
_LOGGER.debug("AsteriskVM adding CDR platform")
|
||||
self.cdr = []
|
||||
async_dispatcher_send(
|
||||
self.hass, SIGNAL_DISCOVER_PLATFORM, "asterisk_cdr"
|
||||
)
|
||||
async_dispatcher_send(self.hass, SIGNAL_CDR_REQUEST)
|
||||
else:
|
||||
_LOGGER.debug(
|
||||
"AsteriskVM sent unknown message '%d' len: %d", command, len(msg)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _request_messages(self) -> None:
|
||||
"""Handle changes to the mailbox."""
|
||||
_LOGGER.debug("Requesting message list")
|
||||
self.client.messages()
|
||||
|
||||
@callback
|
||||
def _request_cdr(self) -> None:
|
||||
"""Handle changes to the CDR."""
|
||||
_LOGGER.debug("Requesting CDR list")
|
||||
self.client.get_cdr()
|
homeassistant/components/asterisk_mbox/mailbox.py (new file)
@@ -0,0 +1,86 @@
|
||||
"""Support for the Asterisk Voicemail interface."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import partial
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from asterisk_mbox import ServerError
|
||||
|
||||
from homeassistant.components.mailbox import CONTENT_TYPE_MPEG, Mailbox, StreamError
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import DOMAIN as ASTERISK_DOMAIN, AsteriskData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SIGNAL_MESSAGE_REQUEST = "asterisk_mbox.message_request"
|
||||
SIGNAL_MESSAGE_UPDATE = "asterisk_mbox.message_updated"
|
||||
|
||||
|
||||
async def async_get_handler(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> Mailbox:
|
||||
"""Set up the Asterix VM platform."""
|
||||
return AsteriskMailbox(hass, ASTERISK_DOMAIN)
|
||||
|
||||
|
||||
class AsteriskMailbox(Mailbox):
|
||||
"""Asterisk VM Sensor."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, name: str) -> None:
|
||||
"""Initialize Asterisk mailbox."""
|
||||
super().__init__(hass, name)
|
||||
async_dispatcher_connect(
|
||||
self.hass, SIGNAL_MESSAGE_UPDATE, self._update_callback
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_callback(self, msg: str) -> None:
|
||||
"""Update the message count in HA, if needed."""
|
||||
self.async_update()
|
||||
|
||||
@property
|
||||
def media_type(self) -> str:
|
||||
"""Return the supported media type."""
|
||||
return CONTENT_TYPE_MPEG
|
||||
|
||||
@property
|
||||
def can_delete(self) -> bool:
|
||||
"""Return if messages can be deleted."""
|
||||
return True
|
||||
|
||||
@property
|
||||
def has_media(self) -> bool:
|
||||
"""Return if messages have attached media files."""
|
||||
return True
|
||||
|
||||
async def async_get_media(self, msgid: str) -> bytes:
|
||||
"""Return the media blob for the msgid."""
|
||||
|
||||
data: AsteriskData = self.hass.data[ASTERISK_DOMAIN]
|
||||
client = data.client
|
||||
try:
|
||||
return await self.hass.async_add_executor_job(
|
||||
partial(client.mp3, msgid, sync=True)
|
||||
)
|
||||
except ServerError as err:
|
||||
raise StreamError(err) from err
|
||||
|
||||
async def async_get_messages(self) -> list[dict[str, Any]]:
|
||||
"""Return a list of the current messages."""
|
||||
data: AsteriskData = self.hass.data[ASTERISK_DOMAIN]
|
||||
return data.messages or []
|
||||
|
||||
async def async_delete(self, msgid: str) -> bool:
|
||||
"""Delete the specified messages."""
|
||||
data: AsteriskData = self.hass.data[ASTERISK_DOMAIN]
|
||||
client = data.client
|
||||
_LOGGER.info("Deleting: %s", msgid)
|
||||
await self.hass.async_add_executor_job(client.delete, msgid)
|
||||
return True
|
homeassistant/components/asterisk_mbox/manifest.json (new file)
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"domain": "asterisk_mbox",
|
||||
"name": "Asterisk Voicemail",
|
||||
"codeowners": [],
|
||||
"documentation": "https://www.home-assistant.io/integrations/asterisk_mbox",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["asterisk_mbox"],
|
||||
"requirements": ["asterisk_mbox==0.5.0"]
|
||||
}
|
homeassistant/components/asterisk_mbox/strings.json (new file)
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"issues": {
|
||||
"deprecated_integration": {
|
||||
"title": "The {integration_title} is being removed",
|
||||
"description": "{integration_title} is being removed as the `{mailbox}` platform is being removed and {integration_title} supports no other platforms. Remove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue."
|
||||
}
|
||||
}
|
||||
}
|
@@ -52,7 +52,7 @@ SENSORS_TYPE_LOAD_AVG = "sensors_load_avg"
|
||||
SENSORS_TYPE_RATES = "sensors_rates"
|
||||
SENSORS_TYPE_TEMPERATURES = "sensors_temperatures"
|
||||
|
||||
WrtDevice = namedtuple("WrtDevice", ["ip", "name", "connected_to"]) # noqa: PYI024
|
||||
WrtDevice = namedtuple("WrtDevice", ["ip", "name", "connected_to"])
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@@ -9,7 +9,7 @@ from typing import Any
|
||||
import aiohttp
|
||||
import voluptuous as vol
|
||||
from yalexs.authenticator_common import ValidationResult
|
||||
from yalexs.const import BRANDS, DEFAULT_BRAND
|
||||
from yalexs.const import BRANDS_WITHOUT_OAUTH, DEFAULT_BRAND
|
||||
from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
@@ -118,7 +118,7 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
vol.Required(
|
||||
CONF_BRAND,
|
||||
default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND),
|
||||
): vol.In(BRANDS),
|
||||
): vol.In(BRANDS_WITHOUT_OAUTH),
|
||||
vol.Required(
|
||||
CONF_LOGIN_METHOD,
|
||||
default=self._user_auth_details.get(
|
||||
@@ -208,7 +208,7 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
vol.Required(
|
||||
CONF_BRAND,
|
||||
default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND),
|
||||
): vol.In(BRANDS),
|
||||
): vol.In(BRANDS_WITHOUT_OAUTH),
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
),
|
||||
|
@@ -28,5 +28,5 @@
  "documentation": "https://www.home-assistant.io/integrations/august",
  "iot_class": "cloud_push",
  "loggers": ["pubnub", "yalexs"],
  "requirements": ["yalexs==8.1.4", "yalexs-ble==2.4.3"]
  "requirements": ["yalexs==8.4.1", "yalexs-ble==2.4.3"]
}
|
||||
|
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from functools import partial
|
||||
import socket
|
||||
|
||||
import aiohttp
|
||||
from yalexs.activity import ACTION_DOORBELL_CALL_MISSED, Activity, ActivityType
|
||||
@@ -25,7 +26,14 @@ def async_create_august_clientsession(hass: HomeAssistant) -> aiohttp.ClientSess
|
||||
# Create an aiohttp session instead of using the default one since the
|
||||
# default one is likely to trigger august's WAF if another integration
|
||||
# is also using Cloudflare
|
||||
return aiohttp_client.async_create_clientsession(hass)
|
||||
#
|
||||
# The family is set to AF_INET because IPv6 keeps coming up as an issue
|
||||
# see https://github.com/home-assistant/core/issues/97146
|
||||
#
|
||||
# When https://github.com/aio-libs/aiohttp/issues/4451 is implemented
|
||||
# we can allow IPv6 again
|
||||
#
|
||||
return aiohttp_client.async_create_clientsession(hass, family=socket.AF_INET)
|
||||
|
||||
|
||||
def retrieve_time_based_activity(
|
||||
|
@@ -322,8 +322,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
},
|
||||
trigger_service_handler,
|
||||
)
|
||||
component.async_register_entity_service(SERVICE_TOGGLE, None, "async_toggle")
|
||||
component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on")
|
||||
component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle")
|
||||
component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on")
|
||||
component.async_register_entity_service(
|
||||
SERVICE_TURN_OFF,
|
||||
{vol.Optional(CONF_STOP_ACTIONS, default=DEFAULT_STOP_ACTIONS): cv.boolean},
|
||||
|
@@ -68,20 +68,20 @@ class BangOlufsenModel(StrEnum):
|
||||
class WebsocketNotification(StrEnum):
|
||||
"""Enum for WebSocket notification types."""
|
||||
|
||||
PLAYBACK_ERROR = "playback_error"
|
||||
PLAYBACK_METADATA = "playback_metadata"
|
||||
PLAYBACK_PROGRESS = "playback_progress"
|
||||
PLAYBACK_SOURCE = "playback_source"
|
||||
PLAYBACK_STATE = "playback_state"
|
||||
SOFTWARE_UPDATE_STATE = "software_update_state"
|
||||
SOURCE_CHANGE = "source_change"
|
||||
VOLUME = "volume"
|
||||
PLAYBACK_ERROR: Final[str] = "playback_error"
|
||||
PLAYBACK_METADATA: Final[str] = "playback_metadata"
|
||||
PLAYBACK_PROGRESS: Final[str] = "playback_progress"
|
||||
PLAYBACK_SOURCE: Final[str] = "playback_source"
|
||||
PLAYBACK_STATE: Final[str] = "playback_state"
|
||||
SOFTWARE_UPDATE_STATE: Final[str] = "software_update_state"
|
||||
SOURCE_CHANGE: Final[str] = "source_change"
|
||||
VOLUME: Final[str] = "volume"
|
||||
|
||||
# Sub-notifications
|
||||
NOTIFICATION = "notification"
|
||||
REMOTE_MENU_CHANGED = "remoteMenuChanged"
|
||||
NOTIFICATION: Final[str] = "notification"
|
||||
REMOTE_MENU_CHANGED: Final[str] = "remoteMenuChanged"
|
||||
|
||||
ALL = "all"
|
||||
ALL: Final[str] = "all"
|
||||
|
||||
|
||||
DOMAIN: Final[str] = "bang_olufsen"
|
||||
|
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
@@ -138,25 +137,65 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
"""Turn on the dispatchers."""
|
||||
await self._initialize()
|
||||
|
||||
signal_handlers: dict[str, Callable] = {
|
||||
CONNECTION_STATUS: self._async_update_connection_state,
|
||||
WebsocketNotification.PLAYBACK_ERROR: self._async_update_playback_error,
|
||||
WebsocketNotification.PLAYBACK_METADATA: self._async_update_playback_metadata,
|
||||
WebsocketNotification.PLAYBACK_PROGRESS: self._async_update_playback_progress,
|
||||
WebsocketNotification.PLAYBACK_STATE: self._async_update_playback_state,
|
||||
WebsocketNotification.REMOTE_MENU_CHANGED: self._async_update_sources,
|
||||
WebsocketNotification.SOURCE_CHANGE: self._async_update_source_change,
|
||||
WebsocketNotification.VOLUME: self._async_update_volume,
|
||||
}
|
||||
|
||||
for signal, signal_handler in signal_handlers.items():
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{signal}",
|
||||
signal_handler,
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{CONNECTION_STATUS}",
|
||||
self._async_update_connection_state,
|
||||
)
|
||||
)
|
||||
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_ERROR}",
|
||||
self._async_update_playback_error,
|
||||
)
|
||||
)
|
||||
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_METADATA}",
|
||||
self._async_update_playback_metadata,
|
||||
)
|
||||
)
|
||||
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_PROGRESS}",
|
||||
self._async_update_playback_progress,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_STATE}",
|
||||
self._async_update_playback_state,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}",
|
||||
self._async_update_sources,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.SOURCE_CHANGE}",
|
||||
self._async_update_source_change,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.VOLUME}",
|
||||
self._async_update_volume,
|
||||
)
|
||||
)
|
||||
|
||||
async def _initialize(self) -> None:
|
||||
"""Initialize connection dependent variables."""
|
||||
|
@@ -9,8 +9,11 @@ from homeassistant.helpers.device_registry import DeviceEntry
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
def get_device(hass: HomeAssistant, unique_id: str) -> DeviceEntry:
|
||||
def get_device(hass: HomeAssistant | None, unique_id: str) -> DeviceEntry | None:
|
||||
"""Get the device."""
|
||||
if not isinstance(hass, HomeAssistant):
|
||||
return None
|
||||
|
||||
device_registry = dr.async_get(hass)
|
||||
device = device_registry.async_get_device({(DOMAIN, unique_id)})
|
||||
assert device
|
||||
|
@@ -20,7 +20,6 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.util.enum import try_parse_enum
|
||||
|
||||
from .const import (
|
||||
BANG_OLUFSEN_WEBSOCKET_EVENT,
|
||||
@@ -93,14 +92,12 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
self, notification: WebsocketNotificationTag
|
||||
) -> None:
|
||||
"""Send notification dispatch."""
|
||||
# Try to match the notification type with available WebsocketNotification members
|
||||
notification_type = try_parse_enum(WebsocketNotification, notification.value)
|
||||
|
||||
if notification_type is WebsocketNotification.REMOTE_MENU_CHANGED:
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}",
|
||||
)
|
||||
if notification.value:
|
||||
if WebsocketNotification.REMOTE_MENU_CHANGED in notification.value:
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}",
|
||||
)
|
||||
|
||||
def on_playback_error_notification(self, notification: PlaybackError) -> None:
|
||||
"""Send playback_error dispatch."""
|
||||
@@ -157,6 +154,11 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
software_status = await self._client.get_softwareupdate_status()
|
||||
|
||||
# Update the HA device if the sw version does not match
|
||||
if not self._device:
|
||||
self._device = get_device(self.hass, self._unique_id)
|
||||
|
||||
assert self._device
|
||||
|
||||
if software_status.software_version != self._device.sw_version:
|
||||
device_registry = dr.async_get(self.hass)
|
||||
|
||||
@@ -167,6 +169,10 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
|
||||
def on_all_notifications_raw(self, notification: dict) -> None:
|
||||
"""Receive all notifications."""
|
||||
if not self._device:
|
||||
self._device = get_device(self.hass, self._unique_id)
|
||||
|
||||
assert self._device
|
||||
|
||||
# Add the device_id and serial_number to the notification
|
||||
notification["device_id"] = self._device.id
|
||||
|
@@ -39,7 +39,7 @@ def get_scanner(hass: HomeAssistant, config: ConfigType) -> BboxDeviceScanner |
|
||||
return scanner if scanner.success_init else None
|
||||
|
||||
|
||||
Device = namedtuple("Device", ["mac", "name", "ip", "last_update"]) # noqa: PYI024
|
||||
Device = namedtuple("Device", ["mac", "name", "ip", "last_update"])
|
||||
|
||||
|
||||
class BboxDeviceScanner(DeviceScanner):
|
||||
|
@@ -243,8 +243,8 @@
|
||||
"power": {
|
||||
"name": "Power",
|
||||
"state": {
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]"
|
||||
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
|
||||
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
|
||||
}
|
||||
},
|
||||
"presence": {
|
||||
|
@@ -6,7 +6,6 @@ from typing import Any
|
||||
|
||||
from blebox_uniapi.box import Box
|
||||
import blebox_uniapi.cover
|
||||
from blebox_uniapi.cover import BleboxCoverState
|
||||
|
||||
from homeassistant.components.cover import (
|
||||
ATTR_POSITION,
|
||||
@@ -29,18 +28,19 @@ BLEBOX_TO_COVER_DEVICE_CLASSES = {
|
||||
"shutter": CoverDeviceClass.SHUTTER,
|
||||
}
|
||||
|
||||
|
||||
BLEBOX_TO_HASS_COVER_STATES = {
|
||||
None: None,
|
||||
# all blebox covers
|
||||
BleboxCoverState.MOVING_DOWN: STATE_CLOSING,
|
||||
BleboxCoverState.MOVING_UP: STATE_OPENING,
|
||||
BleboxCoverState.MANUALLY_STOPPED: STATE_OPEN,
|
||||
BleboxCoverState.LOWER_LIMIT_REACHED: STATE_CLOSED,
|
||||
BleboxCoverState.UPPER_LIMIT_REACHED: STATE_OPEN,
|
||||
# extra states of gateController product
|
||||
BleboxCoverState.OVERLOAD: STATE_OPEN,
|
||||
BleboxCoverState.MOTOR_FAILURE: STATE_OPEN,
|
||||
BleboxCoverState.SAFETY_STOP: STATE_OPEN,
|
||||
0: STATE_CLOSING, # moving down
|
||||
1: STATE_OPENING, # moving up
|
||||
2: STATE_OPEN, # manually stopped
|
||||
3: STATE_CLOSED, # lower limit
|
||||
4: STATE_OPEN, # upper limit / open
|
||||
# gateController
|
||||
5: STATE_OPEN, # overload
|
||||
6: STATE_OPEN, # motor failure
|
||||
# 7 is not used
|
||||
8: STATE_OPEN, # safety stop
|
||||
}
|
||||
|
||||
|
||||
@@ -64,20 +64,14 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity):
"""Initialize a BleBox cover feature."""
super().__init__(feature)
self._attr_device_class = BLEBOX_TO_COVER_DEVICE_CLASSES[feature.device_class]
position = CoverEntityFeature.SET_POSITION if feature.is_slider else 0
stop = CoverEntityFeature.STOP if feature.has_stop else 0
self._attr_supported_features = (
CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
position | stop | CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
)
if feature.is_slider:
self._attr_supported_features |= CoverEntityFeature.SET_POSITION

if feature.has_stop:
self._attr_supported_features |= CoverEntityFeature.STOP

if feature.has_tilt:
self._attr_supported_features |= (
CoverEntityFeature.SET_TILT_POSITION
| CoverEntityFeature.OPEN_TILT
| CoverEntityFeature.CLOSE_TILT
self._attr_supported_features = (
self._attr_supported_features | CoverEntityFeature.SET_TILT_POSITION
)

@property
@@ -111,24 +105,16 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity):
return self._is_state(STATE_CLOSED)

async def async_open_cover(self, **kwargs: Any) -> None:
"""Fully open the cover position."""
"""Open the cover position."""
await self._feature.async_open()

async def async_close_cover(self, **kwargs: Any) -> None:
"""Fully close the cover position."""
"""Close the cover position."""
await self._feature.async_close()

async def async_open_cover_tilt(self, **kwargs: Any) -> None:
"""Fully open the cover tilt."""
await self._feature.async_set_tilt_position(0)

async def async_close_cover_tilt(self, **kwargs: Any) -> None:
"""Fully close the cover tilt."""
# note: values are reversed
await self._feature.async_set_tilt_position(100)

async def async_set_cover_position(self, **kwargs: Any) -> None:
"""Set the cover position."""

position = kwargs[ATTR_POSITION]
await self._feature.async_set_position(100 - position)

@@ -138,6 +124,7 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity):

async def async_set_cover_tilt_position(self, **kwargs: Any) -> None:
"""Set the tilt position."""

position = kwargs[ATTR_TILT_POSITION]
await self._feature.async_set_tilt_position(100 - position)
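Both versions of the constructor in the hunk above assemble the same `CoverEntityFeature` bitmask, one as a single `|` expression with conditional terms and one through incremental `|=` updates. A small standalone sketch (illustrative values, not from this diff) showing why the two styles are interchangeable for an `IntFlag`:

```python
from homeassistant.components.cover import CoverEntityFeature

# Features every cover in this sketch supports.
features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE

has_stop = True  # placeholder capability flag for this illustration

# Incremental style, as in the newer code above.
if has_stop:
    features |= CoverEntityFeature.STOP

# Single-expression style, as in the older code above.
same_features = (
    (CoverEntityFeature.STOP if has_stop else 0)
    | CoverEntityFeature.OPEN
    | CoverEntityFeature.CLOSE
)

assert features == same_features
```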
@@ -14,13 +14,13 @@ from homeassistant.const import (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
LIGHT_LUX,
PERCENTAGE,
POWER_VOLT_AMPERE_REACTIVE,
UnitOfApparentPower,
UnitOfElectricCurrent,
UnitOfElectricPotential,
UnitOfEnergy,
UnitOfFrequency,
UnitOfPower,
UnitOfReactivePower,
UnitOfSpeed,
UnitOfTemperature,
)
@@ -85,7 +85,7 @@ SENSOR_TYPES = (
SensorEntityDescription(
key="reactivePower",
device_class=SensorDeviceClass.POWER,
native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE,
native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE,
),
SensorEntityDescription(
key="activePower",
@@ -51,8 +51,8 @@ async def async_setup_entry(
async_add_entities(entities)

platform = entity_platform.async_get_current_platform()
platform.async_register_entity_service(SERVICE_RECORD, None, "record")
platform.async_register_entity_service(SERVICE_TRIGGER, None, "trigger_camera")
platform.async_register_entity_service(SERVICE_RECORD, {}, "record")
platform.async_register_entity_service(SERVICE_TRIGGER, {}, "trigger_camera")
platform.async_register_entity_service(
SERVICE_SAVE_RECENT_CLIPS,
{vol.Required(CONF_FILE_PATH): cv.string},
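The hunk above only changes the schema argument of `async_register_entity_service` between `None` and `{}` for services without extra fields. For context, a hedged sketch of registering an entity service that does take a field; the service and field names here are invented for the example:

```python
import voluptuous as vol

from homeassistant.helpers import config_validation as cv, entity_platform


async def _register_example_service() -> None:
    """Sketch: only valid while a platform is being set up."""
    platform = entity_platform.async_get_current_platform()
    platform.async_register_entity_service(
        "save_recent_clips",  # service name exposed to users (invented here)
        {vol.Required("file_path"): cv.string},  # extra fields the call accepts
        "save_recent_clips",  # entity method invoked for each targeted entity
    )
```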
@@ -19,7 +19,7 @@
"bluetooth-adapters==0.19.4",
"bluetooth-auto-recovery==1.4.2",
"bluetooth-data-tools==1.19.4",
"dbus-fast==2.23.0",
"habluetooth==3.3.2"
"dbus-fast==2.22.1",
"habluetooth==3.1.3"
]
}
@@ -23,6 +23,8 @@ from .coordinator import BMWDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)


CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)

SERVICE_SCHEMA = vol.Schema(
vol.Any(
{vol.Required(ATTR_VIN): cv.string},

@@ -52,7 +52,7 @@ async def async_setup_entry(
for service in ENTITY_SERVICES:
platform.async_register_entity_service(
service,
None,
{},
f"async_{service}",
)
@@ -22,6 +22,7 @@ from homeassistant.components.todo import (
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.config_validation import make_entity_service_schema
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

@@ -61,12 +62,14 @@ async def async_setup_entry(

platform.async_register_entity_service(
SERVICE_PUSH_NOTIFICATION,
{
vol.Required(ATTR_NOTIFICATION_TYPE): vol.All(
vol.Upper, cv.enum(BringNotificationType)
),
vol.Optional(ATTR_ITEM_NAME): cv.string,
},
make_entity_service_schema(
{
vol.Required(ATTR_NOTIFICATION_TYPE): vol.All(
vol.Upper, cv.enum(BringNotificationType)
),
vol.Optional(ATTR_ITEM_NAME): cv.string,
}
),
"async_send_message",
)
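The replacement wraps the field dict in `make_entity_service_schema`, which builds a voluptuous schema that also accepts the standard entity-targeting fields. A minimal sketch of that wrapping, with a made-up field name:

```python
import voluptuous as vol

from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.config_validation import make_entity_service_schema

# A bare dict validates only the listed keys; the helper layers the usual
# entity-targeting fields on top of them.
EXAMPLE_PUSH_SCHEMA = make_entity_service_schema(
    {vol.Optional("item_name"): cv.string}
)
```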
@@ -9,7 +9,7 @@
"iot_class": "local_polling",
"loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
"quality_scale": "platinum",
"requirements": ["brother==4.3.0"],
"requirements": ["brother==4.2.0"],
"zeroconf": [
{
"type": "_printer._tcp.local.",
@@ -2,7 +2,7 @@

import dataclasses

from bsblan import BSBLAN, BSBLANConfig, Device, Info, StaticState
from bsblan import BSBLAN, Device, Info, StaticState

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
@@ -22,7 +22,7 @@ PLATFORMS = [Platform.CLIMATE]


@dataclasses.dataclass
class BSBLanData:
class HomeAssistantBSBLANData:
"""BSBLan data stored in the Home Assistant data object."""

coordinator: BSBLanUpdateCoordinator
@@ -35,29 +35,23 @@ class BSBLanData:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up BSB-Lan from a config entry."""

# create config using BSBLANConfig
config = BSBLANConfig(
host=entry.data[CONF_HOST],
session = async_get_clientsession(hass)
bsblan = BSBLAN(
entry.data[CONF_HOST],
passkey=entry.data[CONF_PASSKEY],
port=entry.data[CONF_PORT],
username=entry.data.get(CONF_USERNAME),
password=entry.data.get(CONF_PASSWORD),
session=session,
)

# create BSBLAN client
session = async_get_clientsession(hass)
bsblan = BSBLAN(config, session)

# Create and perform first refresh of the coordinator
coordinator = BSBLanUpdateCoordinator(hass, entry, bsblan)
await coordinator.async_config_entry_first_refresh()

# Fetch all required data concurrently
device = await bsblan.device()
info = await bsblan.info()
static = await bsblan.static_values()

hass.data.setdefault(DOMAIN, {})[entry.entry_id] = BSBLanData(
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = HomeAssistantBSBLANData(
client=bsblan,
coordinator=coordinator,
device=device,
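The two setup blocks above mirror the constructor change between the pinned python-bsblan releases: the 0.6.x code path builds a `BSBLANConfig` first, while the 0.5.x code path (the version this branch pins) passes the connection settings straight to `BSBLAN`. A small sketch with placeholder values, assuming only the signatures visible in this diff:

```python
from aiohttp import ClientSession

from bsblan import BSBLAN


async def build_client(session: ClientSession) -> BSBLAN:
    """Sketch: construct the client the way the pinned 0.5.x release expects."""
    # python-bsblan 0.5.x style (this branch): keyword arguments on BSBLAN.
    return BSBLAN(
        "192.168.1.10",  # placeholder host
        passkey=None,
        port=80,
        username=None,
        password=None,
        session=session,
    )

    # python-bsblan 0.6.x style (the removed lines): a config object instead.
    # from bsblan import BSBLANConfig
    # config = BSBLANConfig(host="192.168.1.10", passkey=None, port=80)
    # return BSBLAN(config, session)
```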
@@ -4,7 +4,7 @@ from __future__ import annotations

from typing import Any

from bsblan import BSBLANError
from bsblan import BSBLAN, BSBLANError, Device, Info, State, StaticState

from homeassistant.components.climate import (
ATTR_HVAC_MODE,
@@ -21,11 +21,15 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from homeassistant.util.enum import try_parse_enum

from . import BSBLanData
from . import HomeAssistantBSBLANData
from .const import ATTR_TARGET_TEMPERATURE, DOMAIN
from .entity import BSBLanEntity
from .entity import BSBLANEntity

PARALLEL_UPDATES = 1
@@ -47,17 +51,24 @@ async def async_setup_entry(
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up BSBLAN device based on a config entry."""
data: BSBLanData = hass.data[DOMAIN][entry.entry_id]
data: HomeAssistantBSBLANData = hass.data[DOMAIN][entry.entry_id]
async_add_entities(
[
BSBLANClimate(
data,
data.coordinator,
data.client,
data.device,
data.info,
data.static,
entry,
)
]
)


class BSBLANClimate(BSBLanEntity, ClimateEntity):
class BSBLANClimate(
BSBLANEntity, CoordinatorEntity[DataUpdateCoordinator[State]], ClimateEntity
):
"""Defines a BSBLAN climate device."""

_attr_has_entity_name = True
@@ -69,22 +80,30 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
| ClimateEntityFeature.TURN_OFF
| ClimateEntityFeature.TURN_ON
)

_attr_preset_modes = PRESET_MODES

# Determine hvac modes
_attr_hvac_modes = HVAC_MODES
_enable_turn_on_off_backwards_compatibility = False

def __init__(
self,
data: BSBLanData,
coordinator: DataUpdateCoordinator[State],
client: BSBLAN,
device: Device,
info: Info,
static: StaticState,
entry: ConfigEntry,
) -> None:
"""Initialize BSBLAN climate device."""
super().__init__(data.coordinator, data)
self._attr_unique_id = f"{format_mac(data.device.MAC)}-climate"
super().__init__(client, device, info, static, entry)
CoordinatorEntity.__init__(self, coordinator)
self._attr_unique_id = f"{format_mac(device.MAC)}-climate"

self._attr_min_temp = float(data.static.min_temp.value)
self._attr_max_temp = float(data.static.max_temp.value)
if data.static.min_temp.unit in ("&deg;C", "°C"):
self._attr_min_temp = float(static.min_temp.value)
self._attr_max_temp = float(static.max_temp.value)
# check if self.coordinator.data.current_temperature.unit is "&deg;C" or "°C"
if self.coordinator.data.current_temperature.unit in ("&deg;C", "°C"):
self._attr_temperature_unit = UnitOfTemperature.CELSIUS
else:
self._attr_temperature_unit = UnitOfTemperature.FAHRENHEIT
@@ -92,30 +111,30 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""
if self.coordinator.data.state.current_temperature.value == "---":
if self.coordinator.data.current_temperature.value == "---":
# device returns no current temperature
return None

return float(self.coordinator.data.state.current_temperature.value)
return float(self.coordinator.data.current_temperature.value)

@property
def target_temperature(self) -> float | None:
"""Return the temperature we try to reach."""
return float(self.coordinator.data.state.target_temperature.value)
return float(self.coordinator.data.target_temperature.value)

@property
def hvac_mode(self) -> HVACMode | None:
"""Return hvac operation ie. heat, cool mode."""
if self.coordinator.data.state.hvac_mode.value == PRESET_ECO:
if self.coordinator.data.hvac_mode.value == PRESET_ECO:
return HVACMode.AUTO
return try_parse_enum(HVACMode, self.coordinator.data.state.hvac_mode.value)
return try_parse_enum(HVACMode, self.coordinator.data.hvac_mode.value)

@property
def preset_mode(self) -> str | None:
"""Return the current preset mode."""
if (
self.hvac_mode == HVACMode.AUTO
and self.coordinator.data.state.hvac_mode.value == PRESET_ECO
and self.coordinator.data.hvac_mode.value == PRESET_ECO
):
return PRESET_ECO
return PRESET_NONE
@@ -154,7 +173,7 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
else:
data[ATTR_HVAC_MODE] = kwargs[ATTR_PRESET_MODE]
try:
await self.coordinator.client.thermostat(**data)
await self.client.thermostat(**data)
except BSBLANError as err:
raise HomeAssistantError(
"An error occurred while updating the BSBLAN device",
@@ -4,7 +4,7 @@ from __future__ import annotations

from typing import Any

from bsblan import BSBLAN, BSBLANConfig, BSBLANError
from bsblan import BSBLAN, BSBLANError
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
@@ -80,15 +80,15 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):

async def _get_bsblan_info(self, raise_on_progress: bool = True) -> None:
"""Get device information from an BSBLAN device."""
config = BSBLANConfig(
session = async_get_clientsession(self.hass)
bsblan = BSBLAN(
host=self.host,
passkey=self.passkey,
port=self.port,
username=self.username,
password=self.password,
passkey=self.passkey,
port=self.port,
session=session,
)
session = async_get_clientsession(self.hass)
bsblan = BSBLAN(config, session)
device = await bsblan.device()
self.mac = device.MAC
@@ -21,4 +21,6 @@ ATTR_OUTSIDE_TEMPERATURE: Final = "outside_temperature"

CONF_PASSKEY: Final = "passkey"

CONF_DEVICE_IDENT: Final = "RVS21.831F/127"

DEFAULT_PORT: Final = 80
@@ -1,10 +1,12 @@
"""DataUpdateCoordinator for the BSB-Lan integration."""

from dataclasses import dataclass
from __future__ import annotations

from datetime import timedelta
from random import randint

from bsblan import BSBLAN, BSBLANConnectionError, State
from bsblan import BSBLAN, BSBLANConnectionError
from bsblan.models import State

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST
@@ -14,14 +16,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
from .const import DOMAIN, LOGGER, SCAN_INTERVAL


@dataclass
class BSBLanCoordinatorData:
"""BSBLan data stored in the Home Assistant data object."""

state: State


class BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]):
class BSBLanUpdateCoordinator(DataUpdateCoordinator[State]):
"""The BSB-Lan update coordinator."""

config_entry: ConfigEntry
@@ -33,32 +28,30 @@ class BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]):
client: BSBLAN,
) -> None:
"""Initialize the BSB-Lan coordinator."""
super().__init__(
hass,
logger=LOGGER,
name=f"{DOMAIN}_{config_entry.data[CONF_HOST]}",
update_interval=self._get_update_interval(),
)

self.client = client

def _get_update_interval(self) -> timedelta:
"""Get the update interval with a random offset.
super().__init__(
hass,
LOGGER,
name=f"{DOMAIN}_{config_entry.data[CONF_HOST]}",
# use the default scan interval and add a random number of seconds to avoid timeouts when
# the BSB-Lan device is already/still busy retrieving data,
# e.g. for MQTT or internal logging.
update_interval=SCAN_INTERVAL + timedelta(seconds=randint(1, 8)),
)

Use the default scan interval and add a random number of seconds to avoid timeouts when
the BSB-Lan device is already/still busy retrieving data,
e.g. for MQTT or internal logging.
"""
return SCAN_INTERVAL + timedelta(seconds=randint(1, 8))
async def _async_update_data(self) -> State:
"""Get state from BSB-Lan device."""

# use the default scan interval and add a random number of seconds to avoid timeouts when
# the BSB-Lan device is already/still busy retrieving data, e.g. for MQTT or internal logging.
self.update_interval = SCAN_INTERVAL + timedelta(seconds=randint(1, 8))

async def _async_update_data(self) -> BSBLanCoordinatorData:
"""Get state and sensor data from BSB-Lan device."""
try:
state = await self.client.state()
return await self.client.state()
except BSBLANConnectionError as err:
host = self.config_entry.data[CONF_HOST] if self.config_entry else "unknown"
raise UpdateFailed(
f"Error while establishing connection with BSB-Lan device at {host}"
f"Error while establishing connection with "
f"BSB-Lan device at {self.config_entry.data[CONF_HOST]}"
) from err

self.update_interval = self._get_update_interval()
return BSBLanCoordinatorData(state=state)
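Both coordinator variants above add a random 1 to 8 second offset to `SCAN_INTERVAL` so polling does not always hit the BSB-Lan device while it is busy with MQTT or internal logging. A compact, generic sketch of that jitter pattern; the class name and the 12 second base interval are illustrative, not taken from this diff:

```python
import logging
from datetime import timedelta
from random import randint

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)
BASE_INTERVAL = timedelta(seconds=12)  # stand-in for SCAN_INTERVAL


class JitteredCoordinator(DataUpdateCoordinator[dict]):
    """Sketch: coordinator that re-randomizes its polling interval each cycle."""

    def __init__(self, hass: HomeAssistant) -> None:
        super().__init__(
            hass,
            _LOGGER,
            name="jitter_example",
            update_interval=BASE_INTERVAL + timedelta(seconds=randint(1, 8)),
        )

    async def _async_update_data(self) -> dict:
        # Pick a fresh offset for the next refresh, then fetch (placeholder payload).
        self.update_interval = BASE_INTERVAL + timedelta(seconds=randint(1, 8))
        return {"state": "ok"}
```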
@@ -7,7 +7,7 @@ from typing import Any
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

from . import BSBLanData
from . import HomeAssistantBSBLANData
from .const import DOMAIN


@@ -15,10 +15,9 @@ async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
data: BSBLanData = hass.data[DOMAIN][entry.entry_id]

data: HomeAssistantBSBLANData = hass.data[DOMAIN][entry.entry_id]
return {
"info": data.info.to_dict(),
"device": data.device.to_dict(),
"state": data.coordinator.data.state.to_dict(),
"info": data.info.dict(),
"device": data.device.dict(),
"state": data.coordinator.data.dict(),
}
@@ -1,35 +1,41 @@
"""BSBLan base entity."""
"""Base entity for the BSBLAN integration."""

from __future__ import annotations

from bsblan import BSBLAN, Device, Info, StaticState

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST
from homeassistant.helpers.device_registry import (
CONNECTION_NETWORK_MAC,
DeviceInfo,
format_mac,
)
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.helpers.entity import Entity

from . import BSBLanData
from .const import DOMAIN
from .coordinator import BSBLanUpdateCoordinator


class BSBLanEntity(CoordinatorEntity[BSBLanUpdateCoordinator]):
"""Defines a base BSBLan entity."""
class BSBLANEntity(Entity):
"""Defines a BSBLAN entity."""

_attr_has_entity_name = True
def __init__(
self,
client: BSBLAN,
device: Device,
info: Info,
static: StaticState,
entry: ConfigEntry,
) -> None:
"""Initialize an BSBLAN entity."""
self.client = client

def __init__(self, coordinator: BSBLanUpdateCoordinator, data: BSBLanData) -> None:
"""Initialize BSBLan entity."""
super().__init__(coordinator, data)
host = self.coordinator.config_entry.data["host"]
mac = self.coordinator.config_entry.data["mac"]
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, data.device.MAC)},
connections={(CONNECTION_NETWORK_MAC, format_mac(mac))},
name=data.device.name,
connections={(CONNECTION_NETWORK_MAC, format_mac(device.MAC))},
identifiers={(DOMAIN, format_mac(device.MAC))},
manufacturer="BSBLAN Inc.",
model=data.info.device_identification.value,
sw_version=data.device.version,
configuration_url=f"http://{host}",
model=info.device_identification.value,
name=device.name,
sw_version=f"{device.version})",
configuration_url=f"http://{entry.data[CONF_HOST]}",
)
@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["bsblan"],
"requirements": ["python-bsblan==0.6.2"]
"requirements": ["python-bsblan==0.5.18"]
}
@@ -51,7 +51,7 @@ def _create_device(data):
return _Device(ip_address, mac, host, status, name)


_Device = namedtuple("_Device", ["ip_address", "mac", "host", "status", "name"]) # noqa: PYI024
_Device = namedtuple("_Device", ["ip_address", "mac", "host", "status", "name"])


class BTSmartHubScanner(DeviceScanner):
Some files were not shown because too many files have changed in this diff.