Mirror of https://github.com/home-assistant/core.git (synced 2025-07-27 07:07:28 +00:00)

Commit 86722ba05e: 2024.8.0 (#123276)
@@ -49,6 +49,7 @@ base_platforms: &base_platforms
  - homeassistant/components/tts/**
  - homeassistant/components/update/**
  - homeassistant/components/vacuum/**
+ - homeassistant/components/valve/**
  - homeassistant/components/water_heater/**
  - homeassistant/components/weather/**
1733  .coveragerc
(File diff suppressed because it is too large.)
1  .github/PULL_REQUEST_TEMPLATE.md
@@ -74,7 +74,6 @@ If the code communicates with devices, web services, or third-party tools:
 - [ ] New or updated dependencies have been added to `requirements_all.txt`.
       Updated by running `python3 -m script.gen_requirements_all`.
 - [ ] For the updated dependencies - a link to the changelog, or at minimum a diff between library versions is added to the PR description.
-- [ ] Untested files have been added to `.coveragerc`.

 <!--
 This project is very active and we have a high turnover of pull requests.
20  .github/workflows/builder.yml
@@ -32,7 +32,7 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -69,7 +69,7 @@ jobs:
      - name: Upload translations
-        uses: actions/upload-artifact@v4.3.3
+        uses: actions/upload-artifact@v4.3.4
@@ -116,7 +116,7 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        if: needs.init.outputs.channel == 'dev'
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -175,7 +175,7 @@ jobs:
      - name: Download translations
-        uses: actions/download-artifact@v4.1.7
+        uses: actions/download-artifact@v4.1.8
@@ -190,7 +190,7 @@ jobs:
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.2.0
+        uses: docker/login-action@v3.3.0
@@ -256,7 +256,7 @@ jobs:
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.2.0
+        uses: docker/login-action@v3.3.0
@@ -329,14 +329,14 @@ jobs:
      - name: Login to DockerHub
        if: matrix.registry == 'docker.io/homeassistant'
-        uses: docker/login-action@v3.2.0
+        uses: docker/login-action@v3.3.0
      - name: Login to GitHub Container Registry
        if: matrix.registry == 'ghcr.io/home-assistant'
-        uses: docker/login-action@v3.2.0
+        uses: docker/login-action@v3.3.0
@@ -453,12 +453,12 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
      - name: Download translations
-        uses: actions/download-artifact@v4.1.7
+        uses: actions/download-artifact@v4.1.8
137  .github/workflows/ci.yaml
@@ -36,7 +36,7 @@ env:
  CACHE_VERSION: 9
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 8
-  HA_SHORT_VERSION: "2024.7"
+  HA_SHORT_VERSION: "2024.8"
  DEFAULT_PYTHON: "3.12"
  ALL_PYTHON_VERSIONS: "['3.12']"
@@ -86,7 +86,7 @@ jobs:
      skip_coverage: ${{ steps.info.outputs.skip_coverage }}
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
    steps:
@@ -218,7 +218,7 @@ jobs:
  pre-commit:
    name: Prepare pre-commit base
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -229,7 +229,7 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -266,7 +266,7 @@ jobs:
  lint-ruff-format:
    name: Check ruff-format
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -274,7 +274,7 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -306,7 +306,7 @@ jobs:
  lint-ruff:
    name: Check ruff
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -314,7 +314,7 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -345,7 +345,7 @@ jobs:
  lint-other:
    name: Check other linters
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -353,7 +353,7 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -437,7 +437,7 @@ jobs:
  base:
    name: Prepare dependencies
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -448,7 +448,7 @@ jobs:
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -514,7 +514,7 @@ jobs:
  hassfest:
    name: Check hassfest
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -522,11 +522,17 @@ jobs:
      - info
      - base
    steps:
+      - name: Install additional OS dependencies
+        run: |
+          sudo rm /etc/apt/sources.list.d/microsoft-prod.list
+          sudo apt-get update
+          sudo apt-get -y install \
+            libturbojpeg
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
@@ -546,7 +552,7 @@ jobs:
  gen-requirements-all:
    name: Check all requirements
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -558,7 +564,7 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -576,9 +582,49 @@ jobs:
        . venv/bin/activate
        python -m script.gen_requirements_all validate

+  audit-licenses:
+    name: Audit licenses
+    runs-on: ubuntu-24.04
+    needs:
+      - info
+      - base
+    if: |
+      needs.info.outputs.requirements == 'true'
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v4.1.7
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
+        id: python
+        uses: actions/setup-python@v5.1.1
+        with:
+          python-version: ${{ env.DEFAULT_PYTHON }}
+          check-latest: true
+      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
+        id: cache-venv
+        uses: actions/cache/restore@v4.0.2
+        with:
+          path: venv
+          fail-on-cache-miss: true
+          key: >-
+            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            needs.info.outputs.python_cache_key }}
+      - name: Run pip-licenses
+        run: |
+          . venv/bin/activate
+          pip-licenses --format=json --output-file=licenses.json
+      - name: Upload licenses
+        uses: actions/upload-artifact@v4.3.4
+        with:
+          name: licenses
+          path: licenses.json
+      - name: Process licenses
+        run: |
+          . venv/bin/activate
+          python -m script.licenses
+
  pylint:
    name: Check pylint
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
    timeout-minutes: 20
    if: |
      github.event.inputs.mypy-only != 'true'
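The audit-licenses job added above runs pip-licenses against the restored virtual environment and then hands the JSON report to `python -m script.licenses`, which is not shown in this diff. As a rough illustration only, assuming the default pip-licenses JSON fields ("Name", "Version", "License") and a hypothetical allow-list (not the actual script.licenses logic), such a gate could look like:

# Hypothetical sketch: the real script.licenses is not part of this commit.
# Assumes pip-licenses' default JSON fields: "Name", "Version", "License".
import json
import sys

ALLOWED = {  # hypothetical allow-list, for illustration
    "MIT License",
    "Apache Software License",
    "BSD License",
}

def main(path: str = "licenses.json") -> int:
    with open(path, encoding="utf-8") as fp:
        packages = json.load(fp)
    # Collect packages whose reported license is not on the allow-list.
    violations = [
        f"{pkg['Name']}=={pkg['Version']}: {pkg['License']}"
        for pkg in packages
        if pkg["License"] not in ALLOWED
    ]
    for line in violations:
        print(line)
    return 1 if violations else 0

if __name__ == "__main__":
    sys.exit(main())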
@@ -591,7 +637,7 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -623,7 +669,7 @@ jobs:
  pylint-tests:
    name: Check pylint on tests
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -636,7 +682,7 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -668,7 +714,7 @@ jobs:
  mypy:
    name: Check mypy
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -680,7 +726,7 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -729,7 +775,7 @@ jobs:
  prepare-pytest-full:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -754,7 +800,7 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -772,14 +818,14 @@ jobs:
      - name: Upload pytest_buckets
-        uses: actions/upload-artifact@v4.3.3
+        uses: actions/upload-artifact@v4.3.4

  pytest-full:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -817,7 +863,7 @@ jobs:
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -836,7 +882,7 @@ jobs:
      - name: Download pytest_buckets
-        uses: actions/download-artifact@v4.1.7
+        uses: actions/download-artifact@v4.1.8
@@ -858,6 +904,7 @@ jobs:
            cov_params+=(--cov-report=xml)
          fi

+          echo "Test group ${{ matrix.group }}: $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt)"
          python3 -b -X dev -m pytest \
            -qq \
            --timeout=9 \
@@ -871,14 +918,14 @@ jobs:
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.3.3
+        uses: actions/upload-artifact@v4.3.4
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.3.3
+        uses: actions/upload-artifact@v4.3.4
@@ -890,7 +937,7 @@ jobs:
  pytest-mariadb:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
    services:
      mariadb:
@@ -935,7 +982,7 @@ jobs:
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -996,7 +1043,7 @@ jobs:
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.3.3
+        uses: actions/upload-artifact@v4.3.4
@@ -1004,7 +1051,7 @@ jobs:
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.3.3
+        uses: actions/upload-artifact@v4.3.4
@@ -1060,7 +1107,7 @@ jobs:
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -1122,7 +1169,7 @@ jobs:
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.3.3
+        uses: actions/upload-artifact@v4.3.4
@@ -1130,7 +1177,7 @@ jobs:
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.3.3
+        uses: actions/upload-artifact@v4.3.4
@@ -1143,7 +1190,7 @@ jobs:
  coverage-full:
    name: Upload test coverage to Codecov (full suite)
    if: needs.info.outputs.skip_coverage != 'true'
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -1154,7 +1201,7 @@ jobs:
      - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.7
+        uses: actions/download-artifact@v4.1.8
@@ -1167,7 +1214,7 @@ jobs:
  pytest-partial:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -1205,7 +1252,7 @@ jobs:
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -1263,14 +1310,14 @@ jobs:
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.3.3
+        uses: actions/upload-artifact@v4.3.4
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.3.3
+        uses: actions/upload-artifact@v4.3.4
@@ -1282,7 +1329,7 @@ jobs:
  coverage-partial:
    name: Upload test coverage to Codecov (partial suite)
    if: needs.info.outputs.skip_coverage != 'true'
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
@@ -1291,7 +1338,7 @@ jobs:
      - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.7
+        uses: actions/download-artifact@v4.1.8
4  .github/workflows/codeql.yml
@@ -24,11 +24,11 @@ jobs:
      - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.25.10
+        uses: github/codeql-action/init@v3.25.15
        with:
          languages: python

      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.25.10
+        uses: github/codeql-action/analyze@v3.25.15
        with:
          category: "/language:python"
2  .github/workflows/translations.yml
@@ -22,7 +22,7 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
28  .github/workflows/wheels.yml
@@ -36,7 +36,7 @@ jobs:
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@v5.1.0
+        uses: actions/setup-python@v5.1.1
@@ -82,14 +82,14 @@ jobs:
      - name: Upload env_file
-        uses: actions/upload-artifact@v4.3.3
+        uses: actions/upload-artifact@v4.3.4
      - name: Upload requirements_diff
-        uses: actions/upload-artifact@v4.3.3
+        uses: actions/upload-artifact@v4.3.4
@@ -101,7 +101,7 @@ jobs:
      - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@v4.3.3
+        uses: actions/upload-artifact@v4.3.4
@@ -121,17 +121,17 @@ jobs:
      - name: Download env_file
-        uses: actions/download-artifact@v4.1.7
+        uses: actions/download-artifact@v4.1.8
      - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.7
+        uses: actions/download-artifact@v4.1.8
      - name: Build wheels
-        uses: home-assistant/wheels@2024.01.0
+        uses: home-assistant/wheels@2024.07.1
@@ -159,17 +159,17 @@ jobs:
      - name: Download env_file
-        uses: actions/download-artifact@v4.1.7
+        uses: actions/download-artifact@v4.1.8
      - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.7
+        uses: actions/download-artifact@v4.1.8
      - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v4.1.7
+        uses: actions/download-artifact@v4.1.8
@@ -203,7 +203,7 @@ jobs:
      - name: Build wheels (old cython)
-        uses: home-assistant/wheels@2024.01.0
+        uses: home-assistant/wheels@2024.07.1
@@ -218,7 +218,7 @@ jobs:
      - name: Build wheels (part 1)
-        uses: home-assistant/wheels@2024.01.0
+        uses: home-assistant/wheels@2024.07.1
@@ -232,7 +232,7 @@ jobs:
      - name: Build wheels (part 2)
-        uses: home-assistant/wheels@2024.01.0
+        uses: home-assistant/wheels@2024.07.1
@@ -246,7 +246,7 @@ jobs:
      - name: Build wheels (part 3)
-        uses: home-assistant/wheels@2024.01.0
+        uses: home-assistant/wheels@2024.07.1
@@ -1,6 +1,6 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.4.9
+    rev: v0.5.5
    hooks:
      - id: ruff
        args:
@@ -83,7 +83,7 @@ repos:
        pass_filenames: false
        language: script
        types: [text]
-        files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|\.coveragerc|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements_test.txt)$
+        files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements_test.txt)$
      - id: hassfest-metadata
        name: hassfest-metadata
        entry: script/run-in-env.sh python3 -m script.hassfest -p metadata
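The hassfest hook's `files:` pattern above drops the `\.coveragerc` alternative, consistent with the `.coveragerc` checklist item removed from the pull request template earlier in this commit. A quick sanity check of the new pattern in Python (the sample paths below are illustrative, not taken from the diff):

# Check which paths trigger the updated hassfest pre-commit hook.
import re

HASSFEST_FILES = re.compile(
    r"^(homeassistant/.+/(icons|manifest|strings)\.json"
    r"|homeassistant/brands/.*\.json"
    r"|homeassistant/.+/services\.yaml"
    r"|script/hassfest/(?!metadata|mypy_config).+\.py"
    r"|requirements_test.txt)$"
)

for path in (
    "homeassistant/components/hue/manifest.json",  # matches
    "homeassistant/components/hue/services.yaml",  # matches
    "script/hassfest/metadata.py",                 # excluded by the negative lookahead
    ".coveragerc",                                 # no longer matches after this change
):
    print(path, bool(HASSFEST_FILES.match(path)))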
@@ -21,6 +21,7 @@ homeassistant.helpers.entity_platform
 homeassistant.helpers.entity_values
 homeassistant.helpers.event
 homeassistant.helpers.reload
+homeassistant.helpers.script
 homeassistant.helpers.script_variables
 homeassistant.helpers.singleton
 homeassistant.helpers.sun
@@ -97,6 +98,7 @@ homeassistant.components.assist_pipeline.*
 homeassistant.components.asterisk_cdr.*
 homeassistant.components.asterisk_mbox.*
 homeassistant.components.asuswrt.*
+homeassistant.components.autarco.*
 homeassistant.components.auth.*
 homeassistant.components.automation.*
 homeassistant.components.awair.*
@@ -118,6 +120,7 @@ homeassistant.components.bond.*
 homeassistant.components.braviatv.*
 homeassistant.components.brother.*
 homeassistant.components.browser.*
+homeassistant.components.bryant_evolution.*
 homeassistant.components.bthome.*
 homeassistant.components.button.*
 homeassistant.components.calendar.*
@@ -165,6 +168,7 @@ homeassistant.components.ecowitt.*
 homeassistant.components.efergy.*
 homeassistant.components.electrasmart.*
 homeassistant.components.electric_kiwi.*
+homeassistant.components.elevenlabs.*
 homeassistant.components.elgato.*
 homeassistant.components.elkm1.*
 homeassistant.components.emulated_hue.*
@@ -253,6 +257,7 @@ homeassistant.components.integration.*
 homeassistant.components.intent.*
 homeassistant.components.intent_script.*
 homeassistant.components.ios.*
+homeassistant.components.iotty.*
 homeassistant.components.ipp.*
 homeassistant.components.iqvia.*
 homeassistant.components.islamic_prayer_times.*
@@ -277,6 +282,7 @@ homeassistant.components.lidarr.*
 homeassistant.components.lifx.*
 homeassistant.components.light.*
 homeassistant.components.linear_garage_door.*
+homeassistant.components.linkplay.*
 homeassistant.components.litejet.*
 homeassistant.components.litterrobot.*
 homeassistant.components.local_ip.*
@@ -287,6 +293,7 @@ homeassistant.components.logger.*
 homeassistant.components.london_underground.*
 homeassistant.components.lookin.*
 homeassistant.components.luftdaten.*
+homeassistant.components.madvr.*
 homeassistant.components.mailbox.*
 homeassistant.components.map.*
 homeassistant.components.mastodon.*
@@ -382,6 +389,7 @@ homeassistant.components.samsungtv.*
 homeassistant.components.scene.*
 homeassistant.components.schedule.*
 homeassistant.components.scrape.*
+homeassistant.components.script.*
 homeassistant.components.search.*
 homeassistant.components.select.*
 homeassistant.components.sensibo.*
36  .vscode/launch.json
@@ -6,38 +6,52 @@
  "configurations": [
    {
      "name": "Home Assistant",
-      "type": "python",
+      "type": "debugpy",
      "request": "launch",
      "module": "homeassistant",
      "justMyCode": false,
-      "args": ["--debug", "-c", "config"],
+      "args": [
+        "--debug",
+        "-c",
+        "config"
+      ],
      "preLaunchTask": "Compile English translations"
    },
    {
      "name": "Home Assistant (skip pip)",
-      "type": "python",
+      "type": "debugpy",
      "request": "launch",
      "module": "homeassistant",
      "justMyCode": false,
-      "args": ["--debug", "-c", "config", "--skip-pip"],
+      "args": [
+        "--debug",
+        "-c",
+        "config",
+        "--skip-pip"
+      ],
      "preLaunchTask": "Compile English translations"
    },
    {
      "name": "Home Assistant: Changed tests",
-      "type": "python",
+      "type": "debugpy",
      "request": "launch",
      "module": "pytest",
      "justMyCode": false,
-      "args": ["--timeout=10", "--picked"],
+      "args": [
+        "--timeout=10",
+        "--picked"
+      ],
    },
    {
      // Debug by attaching to local Home Assistant server using Remote Python Debugger.
      // See https://www.home-assistant.io/integrations/debugpy/
      "name": "Home Assistant: Attach Local",
-      "type": "python",
+      "type": "debugpy",
      "request": "attach",
+      "connect": {
        "port": 5678,
-      "host": "localhost",
+        "host": "localhost"
+      },
      "pathMappings": [
        {
          "localRoot": "${workspaceFolder}",
@@ -49,10 +63,12 @@
      // Debug by attaching to remote Home Assistant server using Remote Python Debugger.
      // See https://www.home-assistant.io/integrations/debugpy/
      "name": "Home Assistant: Attach Remote",
-      "type": "python",
+      "type": "debugpy",
      "request": "attach",
+      "connect": {
        "port": 5678,
-      "host": "homeassistant.local",
+        "host": "homeassistant.local"
+      },
      "pathMappings": [
        {
          "localRoot": "${workspaceFolder}",
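The two "attach" configurations above expect a debugpy server listening on port 5678; in Home Assistant that server is provided by the debugpy integration linked in the comments. For a plain Python process outside Home Assistant, a minimal sketch of the server side these configurations connect to, using the public debugpy API (illustrative only, not part of this commit):

# Minimal debugpy server that the "Home Assistant: Attach Local" configuration can attach to.
# Illustrative only; Home Assistant itself exposes this through its debugpy integration.
import debugpy

debugpy.listen(("localhost", 5678))   # same host/port as the attach configuration
print("Waiting for the VS Code debugger to attach...")
debugpy.wait_for_client()             # block until the IDE attaches
debugpy.breakpoint()                  # pause here once attached
print("Debugger attached, continuing.")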
1  .vscode/tasks.json
@@ -76,6 +76,7 @@
      "detail": "Generate code coverage report for a given integration.",
      "type": "shell",
      "command": "python3 -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto",
+      "dependsOn": ["Compile English translations"],
      "group": {
        "kind": "test",
        "isDefault": true
58  CODEOWNERS
@@ -155,6 +155,8 @@ build.json @home-assistant/supervisor
/tests/components/aussie_broadband/ @nickw444 @Bre77
+/homeassistant/components/autarco/ @klaasnicolaas
+/tests/components/autarco/ @klaasnicolaas
/homeassistant/components/auth/ @home-assistant/core
@@ -195,7 +197,8 @@ build.json @home-assistant/supervisor
-/homeassistant/components/bluesound/ @thrawnarn
+/homeassistant/components/bluesound/ @thrawnarn @LouisChrist
+/tests/components/bluesound/ @thrawnarn @LouisChrist
/homeassistant/components/bluetooth/ @bdraco
@@ -218,6 +221,8 @@ build.json @home-assistant/supervisor
/tests/components/brunt/ @eavanvalkenburg
+/homeassistant/components/bryant_evolution/ @danielsmyers
+/tests/components/bryant_evolution/ @danielsmyers
/homeassistant/components/bsblan/ @liudger
@@ -237,6 +242,8 @@ build.json @home-assistant/supervisor
/tests/components/cert_expiry/ @jjlawren
+/homeassistant/components/chacon_dio/ @cnico
+/tests/components/chacon_dio/ @cnico
/homeassistant/components/cisco_ios/ @fbradyirl
@@ -358,8 +365,8 @@ build.json @home-assistant/supervisor
-/homeassistant/components/ecovacs/ @OverloadUT @mib1185 @edenhaus @Augar
-/tests/components/ecovacs/ @OverloadUT @mib1185 @edenhaus @Augar
+/homeassistant/components/ecovacs/ @mib1185 @edenhaus @Augar
+/tests/components/ecovacs/ @mib1185 @edenhaus @Augar
/homeassistant/components/ecowitt/ @pvizeli
@@ -369,6 +376,8 @@ build.json @home-assistant/supervisor
/tests/components/electric_kiwi/ @mikey0000
+/homeassistant/components/elevenlabs/ @sorgfresser
+/tests/components/elevenlabs/ @sorgfresser
/homeassistant/components/elgato/ @frenck
@@ -380,6 +389,7 @@ build.json @home-assistant/supervisor
/homeassistant/components/emoncms/ @borpin @alexandrecuer
+/tests/components/emoncms/ @borpin @alexandrecuer
/homeassistant/components/emonitor/ @bdraco
@@ -396,8 +406,8 @@ build.json @home-assistant/supervisor
-/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek @catsmanac
-/tests/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek @catsmanac
+/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
+/tests/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
/homeassistant/components/entur_public_transport/ @hfurubotten
@@ -429,6 +439,8 @@ build.json @home-assistant/supervisor
/tests/components/fastdotcom/ @rohankapoorcom @erwindouna
+/homeassistant/components/feedreader/ @mib1185
+/tests/components/feedreader/ @mib1185
/homeassistant/components/fibaro/ @rappenze
@@ -499,6 +511,7 @@ build.json @home-assistant/supervisor
/homeassistant/components/geniushub/ @manzanotti
+/tests/components/geniushub/ @manzanotti
/homeassistant/components/geo_json_events/ @exxamalte
@@ -689,6 +702,8 @@ build.json @home-assistant/supervisor
/tests/components/iotawatt/ @gtdiehl @jyavenard
+/homeassistant/components/iotty/ @pburgio
+/tests/components/iotty/ @pburgio
/homeassistant/components/iperf3/ @rohankapoorcom
@@ -697,10 +712,14 @@ build.json @home-assistant/supervisor
/homeassistant/components/irish_rail_transport/ @ttroy50
+/homeassistant/components/iron_os/ @tr4nt0r
+/tests/components/iron_os/ @tr4nt0r
/homeassistant/components/isal/ @bdraco
/tests/components/islamic_prayer_times/ @engrbm87 @cpfair
+/homeassistant/components/israel_rail/ @shaiu
+/tests/components/israel_rail/ @shaiu
/homeassistant/components/iss/ @DurgNomis-drol
@@ -735,8 +754,8 @@ build.json @home-assistant/supervisor
-/homeassistant/components/knocki/ @joostlek @jgatto1
-/tests/components/knocki/ @joostlek @jgatto1
+/homeassistant/components/knocki/ @joostlek @jgatto1 @JakeBosh
+/tests/components/knocki/ @joostlek @jgatto1 @JakeBosh
/homeassistant/components/knx/ @Julius2342 @farmio @marvin-w
@@ -783,6 +802,8 @@ build.json @home-assistant/supervisor
/tests/components/linear_garage_door/ @IceBotYT
+/homeassistant/components/linkplay/ @Velleman
+/tests/components/linkplay/ @Velleman
/homeassistant/components/linux_battery/ @fabaff
@@ -823,13 +844,16 @@ build.json @home-assistant/supervisor
/tests/components/lutron_caseta/ @swails @bdraco @danaues @eclair4151
/homeassistant/components/lyric/ @timmo001
/tests/components/lyric/ @timmo001
|
/tests/components/lyric/ @timmo001
|
||||||
/homeassistant/components/mastodon/ @fabaff
|
/homeassistant/components/madvr/ @iloveicedgreentea
|
||||||
|
/tests/components/madvr/ @iloveicedgreentea
|
||||||
|
/homeassistant/components/mastodon/ @fabaff @andrew-codechimp
|
||||||
|
/tests/components/mastodon/ @fabaff @andrew-codechimp
|
||||||
/homeassistant/components/matrix/ @PaarthShah
|
/homeassistant/components/matrix/ @PaarthShah
|
||||||
/tests/components/matrix/ @PaarthShah
|
/tests/components/matrix/ @PaarthShah
|
||||||
/homeassistant/components/matter/ @home-assistant/matter
|
/homeassistant/components/matter/ @home-assistant/matter
|
||||||
/tests/components/matter/ @home-assistant/matter
|
/tests/components/matter/ @home-assistant/matter
|
||||||
/homeassistant/components/mealie/ @joostlek
|
/homeassistant/components/mealie/ @joostlek @andrew-codechimp
|
||||||
/tests/components/mealie/ @joostlek
|
/tests/components/mealie/ @joostlek @andrew-codechimp
|
||||||
/homeassistant/components/meater/ @Sotolotl @emontnemery
|
/homeassistant/components/meater/ @Sotolotl @emontnemery
|
||||||
/tests/components/meater/ @Sotolotl @emontnemery
|
/tests/components/meater/ @Sotolotl @emontnemery
|
||||||
/homeassistant/components/medcom_ble/ @elafargue
|
/homeassistant/components/medcom_ble/ @elafargue
|
||||||
@ -874,8 +898,6 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/moat/ @bdraco
|
/tests/components/moat/ @bdraco
|
||||||
/homeassistant/components/mobile_app/ @home-assistant/core
|
/homeassistant/components/mobile_app/ @home-assistant/core
|
||||||
/tests/components/mobile_app/ @home-assistant/core
|
/tests/components/mobile_app/ @home-assistant/core
|
||||||
/homeassistant/components/modbus/ @janiversen
|
|
||||||
/tests/components/modbus/ @janiversen
|
|
||||||
/homeassistant/components/modem_callerid/ @tkdrob
|
/homeassistant/components/modem_callerid/ @tkdrob
|
||||||
/tests/components/modem_callerid/ @tkdrob
|
/tests/components/modem_callerid/ @tkdrob
|
||||||
/homeassistant/components/modern_forms/ @wonderslug
|
/homeassistant/components/modern_forms/ @wonderslug
|
||||||
@ -987,8 +1009,6 @@ build.json @home-assistant/supervisor
|
|||||||
/homeassistant/components/ollama/ @synesthesiam
|
/homeassistant/components/ollama/ @synesthesiam
|
||||||
/tests/components/ollama/ @synesthesiam
|
/tests/components/ollama/ @synesthesiam
|
||||||
/homeassistant/components/ombi/ @larssont
|
/homeassistant/components/ombi/ @larssont
|
||||||
/homeassistant/components/omnilogic/ @oliver84 @djtimca @gentoosu
|
|
||||||
/tests/components/omnilogic/ @oliver84 @djtimca @gentoosu
|
|
||||||
/homeassistant/components/onboarding/ @home-assistant/core
|
/homeassistant/components/onboarding/ @home-assistant/core
|
||||||
/tests/components/onboarding/ @home-assistant/core
|
/tests/components/onboarding/ @home-assistant/core
|
||||||
/homeassistant/components/oncue/ @bdraco @peterager
|
/homeassistant/components/oncue/ @bdraco @peterager
|
||||||
@ -1033,8 +1053,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/otbr/ @home-assistant/core
|
/tests/components/otbr/ @home-assistant/core
|
||||||
/homeassistant/components/ourgroceries/ @OnFreund
|
/homeassistant/components/ourgroceries/ @OnFreund
|
||||||
/tests/components/ourgroceries/ @OnFreund
|
/tests/components/ourgroceries/ @OnFreund
|
||||||
/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117
|
/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
|
||||||
/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117
|
/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
|
||||||
/homeassistant/components/ovo_energy/ @timmo001
|
/homeassistant/components/ovo_energy/ @timmo001
|
||||||
/tests/components/ovo_energy/ @timmo001
|
/tests/components/ovo_energy/ @timmo001
|
||||||
/homeassistant/components/p1_monitor/ @klaasnicolaas
|
/homeassistant/components/p1_monitor/ @klaasnicolaas
|
||||||
@ -1204,6 +1224,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/rtsp_to_webrtc/ @allenporter
|
/tests/components/rtsp_to_webrtc/ @allenporter
|
||||||
/homeassistant/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
|
/homeassistant/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
|
||||||
/tests/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
|
/tests/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
|
||||||
|
/homeassistant/components/russound_rio/ @noahhusby
|
||||||
|
/tests/components/russound_rio/ @noahhusby
|
||||||
/homeassistant/components/ruuvi_gateway/ @akx
|
/homeassistant/components/ruuvi_gateway/ @akx
|
||||||
/tests/components/ruuvi_gateway/ @akx
|
/tests/components/ruuvi_gateway/ @akx
|
||||||
/homeassistant/components/ruuvitag_ble/ @akx
|
/homeassistant/components/ruuvitag_ble/ @akx
|
||||||
@ -1271,6 +1293,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/sighthound/ @robmarkcole
|
/tests/components/sighthound/ @robmarkcole
|
||||||
/homeassistant/components/signal_messenger/ @bbernhard
|
/homeassistant/components/signal_messenger/ @bbernhard
|
||||||
/tests/components/signal_messenger/ @bbernhard
|
/tests/components/signal_messenger/ @bbernhard
|
||||||
|
/homeassistant/components/simplefin/ @scottg489 @jeeftor
|
||||||
|
/tests/components/simplefin/ @scottg489 @jeeftor
|
||||||
/homeassistant/components/simplepush/ @engrbm87
|
/homeassistant/components/simplepush/ @engrbm87
|
||||||
/tests/components/simplepush/ @engrbm87
|
/tests/components/simplepush/ @engrbm87
|
||||||
/homeassistant/components/simplisafe/ @bachya
|
/homeassistant/components/simplisafe/ @bachya
|
||||||
@ -1422,6 +1446,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/tellduslive/ @fredrike
|
/tests/components/tellduslive/ @fredrike
|
||||||
/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core
|
/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core
|
||||||
/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core
|
/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core
|
||||||
|
/homeassistant/components/tesla_fleet/ @Bre77
|
||||||
|
/tests/components/tesla_fleet/ @Bre77
|
||||||
/homeassistant/components/tesla_wall_connector/ @einarhauks
|
/homeassistant/components/tesla_wall_connector/ @einarhauks
|
||||||
/tests/components/tesla_wall_connector/ @einarhauks
|
/tests/components/tesla_wall_connector/ @einarhauks
|
||||||
/homeassistant/components/teslemetry/ @Bre77
|
/homeassistant/components/teslemetry/ @Bre77
|
||||||
|
@@ -12,7 +12,7 @@ ENV \
 ARG QEMU_CPU
 
 # Install uv
-RUN pip3 install uv==0.2.13
+RUN pip3 install uv==0.2.27
 
 WORKDIR /usr/src
 
@@ -4,7 +4,7 @@ coverage:
   status:
     project:
      default:
-        target: 90
+        target: auto
         threshold: 0.09
      required:
        target: auto
@@ -363,15 +363,15 @@ class AuthManager:
         local_only: bool | None = None,
     ) -> None:
         """Update a user."""
-        kwargs: dict[str, Any] = {}
-
-        for attr_name, value in (
-            ("name", name),
-            ("group_ids", group_ids),
-            ("local_only", local_only),
-        ):
-            if value is not None:
-                kwargs[attr_name] = value
+        kwargs: dict[str, Any] = {
+            attr_name: value
+            for attr_name, value in (
+                ("name", name),
+                ("group_ids", group_ids),
+                ("local_only", local_only),
+            )
+            if value is not None
+        }
         await self._store.async_update_user(user, **kwargs)
 
         if is_active is not None:
@@ -105,14 +105,18 @@ class AuthStore:
             "perm_lookup": self._perm_lookup,
         }
 
-        for attr_name, value in (
-            ("is_owner", is_owner),
-            ("is_active", is_active),
-            ("local_only", local_only),
-            ("system_generated", system_generated),
-        ):
-            if value is not None:
-                kwargs[attr_name] = value
+        kwargs.update(
+            {
+                attr_name: value
+                for attr_name, value in (
+                    ("is_owner", is_owner),
+                    ("is_active", is_active),
+                    ("local_only", local_only),
+                    ("system_generated", system_generated),
+                )
+                if value is not None
+            }
+        )
 
         new_user = models.User(**kwargs)
 
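Both auth hunks above replace an imperative loop that conditionally filled `kwargs` with a dict comprehension that filters out `None` values. A minimal, standalone sketch of that pattern follows; the function name and arguments here are illustrative, not taken from the diff.

from typing import Any


def build_kwargs(name: str | None, group_ids: list[str] | None) -> dict[str, Any]:
    """Collect only the optional arguments that were actually provided."""
    return {
        attr_name: value
        for attr_name, value in (
            ("name", name),
            ("group_ids", group_ids),
        )
        if value is not None
    }


# Example: only "name" is kept; "group_ids" is dropped because it is None.
assert build_kwargs("Paulus", None) == {"name": "Paulus"}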
@@ -18,9 +18,12 @@ from homeassistant.const import (
     EVENT_THEMES_UPDATED,
 )
 from homeassistant.helpers.area_registry import EVENT_AREA_REGISTRY_UPDATED
+from homeassistant.helpers.category_registry import EVENT_CATEGORY_REGISTRY_UPDATED
 from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED
 from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED
+from homeassistant.helpers.floor_registry import EVENT_FLOOR_REGISTRY_UPDATED
 from homeassistant.helpers.issue_registry import EVENT_REPAIRS_ISSUE_REGISTRY_UPDATED
+from homeassistant.helpers.label_registry import EVENT_LABEL_REGISTRY_UPDATED
 from homeassistant.util.event_type import EventType
 
 # These are events that do not contain any sensitive data
@@ -41,4 +44,7 @@ SUBSCRIBE_ALLOWLIST: Final[set[EventType[Any] | str]] = {
     EVENT_SHOPPING_LIST_UPDATED,
     EVENT_STATE_CHANGED,
     EVENT_THEMES_UPDATED,
+    EVENT_LABEL_REGISTRY_UPDATED,
+    EVENT_CATEGORY_REGISTRY_UPDATED,
+    EVENT_FLOOR_REGISTRY_UPDATED,
 }
@@ -88,7 +88,7 @@ from .helpers import (
 )
 from .helpers.dispatcher import async_dispatcher_send_internal
 from .helpers.storage import get_internal_store_manager
-from .helpers.system_info import async_get_system_info
+from .helpers.system_info import async_get_system_info, is_official_image
 from .helpers.typing import ConfigType
 from .setup import (
     # _setup_started is marked as protected to make it clear
@@ -104,7 +104,7 @@ from .setup import (
 from .util.async_ import create_eager_task
 from .util.hass_dict import HassKey
 from .util.logging import async_activate_log_queue_handler
-from .util.package import async_get_user_site, is_virtual_env
+from .util.package import async_get_user_site, is_docker_env, is_virtual_env
 
 with contextlib.suppress(ImportError):
     # Ensure anyio backend is imported to avoid it being imported in the event loop
@@ -223,8 +223,10 @@ CRITICAL_INTEGRATIONS = {
 SETUP_ORDER = (
     # Load logging and http deps as soon as possible
     ("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS),
-    # Setup frontend and recorder
-    ("frontend, recorder", {*FRONTEND_INTEGRATIONS, *RECORDER_INTEGRATIONS}),
+    # Setup frontend
+    ("frontend", FRONTEND_INTEGRATIONS),
+    # Setup recorder
+    ("recorder", RECORDER_INTEGRATIONS),
     # Start up debuggers. Start these first in case they want to wait.
     ("debugger", DEBUGGER_INTEGRATIONS),
 )
@@ -407,6 +409,10 @@ def _init_blocking_io_modules_in_executor() -> None:
     # Initialize the mimetypes module to avoid blocking calls
     # to the filesystem to load the mime.types file.
     mimetypes.init()
+    # Initialize is_official_image and is_docker_env to avoid blocking calls
+    # to the filesystem.
+    is_official_image()
+    is_docker_env()
 
 
 async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
@@ -902,7 +908,13 @@ async def _async_resolve_domains_to_setup(
     await asyncio.gather(*resolve_dependencies_tasks)
 
     for itg in integrations_to_process:
-        for dep in itg.all_dependencies:
+        try:
+            all_deps = itg.all_dependencies
+        except RuntimeError:
+            # Integration.all_dependencies raises RuntimeError if
+            # dependencies could not be resolved
+            continue
+        for dep in all_deps:
             if dep in domains_to_setup:
                 continue
             domains_to_setup.add(dep)
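The last bootstrap hunk wraps access to `itg.all_dependencies` in a try/except because that property raises `RuntimeError` when the integration's dependencies could not be resolved. A minimal standalone sketch of that guard pattern follows; the `Integration` stub and `collect_dependencies` helper are illustrative stand-ins, not the real classes.

class Integration:
    """Illustrative stand-in for an integration whose deps may be unresolved."""

    def __init__(self, name: str, deps: set[str] | None) -> None:
        self.name = name
        self._deps = deps

    @property
    def all_dependencies(self) -> set[str]:
        if self._deps is None:
            raise RuntimeError(f"Dependencies for {self.name} not resolved")
        return self._deps


def collect_dependencies(integrations: list[Integration]) -> set[str]:
    """Collect dependencies, skipping integrations whose deps failed to resolve."""
    found: set[str] = set()
    for itg in integrations:
        try:
            all_deps = itg.all_dependencies
        except RuntimeError:
            continue  # unresolved dependencies: skip instead of failing setup
        found.update(all_deps)
    return found


assert collect_dependencies(
    [Integration("light", {"http"}), Integration("broken", None)]
) == {"http"}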
@@ -1,5 +1,5 @@
 {
   "domain": "logitech",
   "name": "Logitech",
-  "integrations": ["harmony", "ue_smart_radio", "squeezebox"]
+  "integrations": ["harmony", "squeezebox"]
 }
@@ -1,5 +1,5 @@
 {
   "domain": "tesla",
   "name": "Tesla",
-  "integrations": ["powerwall", "tesla_wall_connector"]
+  "integrations": ["powerwall", "tesla_wall_connector", "tesla_fleet"]
 }
@@ -206,7 +206,8 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
     async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
         """Set the HVAC Mode and State."""
         if hvac_mode == HVACMode.OFF:
-            return await self.async_turn_off()
+            await self.async_turn_off()
+            return
         if hvac_mode == HVACMode.HEAT_COOL and self.preset_mode != ADVANTAGE_AIR_MYAUTO:
             raise ServiceValidationError("Heat/Cool is not supported in this mode")
         await self.async_update_ac(
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aemet",
   "iot_class": "cloud_polling",
   "loggers": ["aemet_opendata"],
-  "requirements": ["AEMET-OpenData==0.5.2"]
+  "requirements": ["AEMET-OpenData==0.5.3"]
 }
@@ -4,7 +4,7 @@ from __future__ import annotations
 
 from dataclasses import dataclass
 
-from airgradient import AirGradientClient
+from airgradient import AirGradientClient, get_model_name
 
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_HOST, Platform
@@ -35,7 +35,7 @@ class AirGradientData:
 type AirGradientConfigEntry = ConfigEntry[AirGradientData]
 
 
-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: AirGradientConfigEntry) -> bool:
     """Set up Airgradient from a config entry."""
 
     client = AirGradientClient(
@@ -53,7 +53,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         config_entry_id=entry.entry_id,
         identifiers={(DOMAIN, measurement_coordinator.serial_number)},
         manufacturer="AirGradient",
-        model=measurement_coordinator.data.model,
+        model=get_model_name(measurement_coordinator.data.model),
+        model_id=measurement_coordinator.data.model,
         serial_number=measurement_coordinator.data.serial_number,
         sw_version=measurement_coordinator.data.firmware_version,
     )
@@ -68,6 +69,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     return True
 
 
-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(
+    hass: HomeAssistant, entry: AirGradientConfigEntry
+) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -2,9 +2,13 @@
 
 from typing import Any
 
-from airgradient import AirGradientClient, AirGradientError, ConfigurationControl
+from airgradient import (
+    AirGradientClient,
+    AirGradientError,
+    AirGradientParseError,
+    ConfigurationControl,
+)
 from awesomeversion import AwesomeVersion
-from mashumaro import MissingField
 import voluptuous as vol
 
 from homeassistant.components import zeroconf
@@ -83,10 +87,10 @@ class AirGradientConfigFlow(ConfigFlow, domain=DOMAIN):
         self.client = AirGradientClient(user_input[CONF_HOST], session=session)
         try:
             current_measures = await self.client.get_current_measures()
+        except AirGradientParseError:
+            return self.async_abort(reason="invalid_version")
         except AirGradientError:
             errors["base"] = "cannot_connect"
-        except MissingField:
-            return self.async_abort(reason="invalid_version")
         else:
             await self.async_set_unique_id(current_measures.serial_number)
             self._abort_if_unique_id_configured()
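The config-flow hunk above moves the parse failure into a dedicated `except AirGradientParseError` branch placed before the broader `except AirGradientError`, so the more specific error is handled first. A minimal sketch of why that ordering matters follows; the exception classes and `classify` helper are illustrative stand-ins.

class DeviceError(Exception):
    """Base error for a hypothetical device client."""


class DeviceParseError(DeviceError):
    """Raised when the device answers with an unparsable payload."""


def classify(exc: Exception) -> str:
    """Handle the specific subclass before the broad base class."""
    try:
        raise exc
    except DeviceParseError:
        return "abort: unsupported firmware"
    except DeviceError:
        return "retry: cannot connect"


# The subclass branch wins because it is listed first; swapping the two
# except blocks would route every DeviceParseError into the generic branch.
assert classify(DeviceParseError()) == "abort: unsupported firmware"
assert classify(DeviceError()) == "retry: cannot connect"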
@@ -19,7 +19,6 @@ if TYPE_CHECKING:
 class AirGradientCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
     """Class to manage fetching AirGradient data."""
 
-    _update_interval: timedelta
     config_entry: AirGradientConfigEntry
 
     def __init__(self, hass: HomeAssistant, client: AirGradientClient) -> None:
@@ -28,7 +27,7 @@ class AirGradientCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
             hass,
             logger=LOGGER,
             name=f"AirGradient {client.host}",
-            update_interval=self._update_interval,
+            update_interval=timedelta(minutes=1),
         )
         self.client = client
         assert self.config_entry.unique_id
@@ -47,8 +46,6 @@ class AirGradientCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
 class AirGradientMeasurementCoordinator(AirGradientCoordinator[Measures]):
     """Class to manage fetching AirGradient data."""
 
-    _update_interval = timedelta(minutes=1)
-
     async def _update_data(self) -> Measures:
         return await self.client.get_current_measures()
 
@@ -56,7 +53,5 @@ class AirGradientMeasurementCoordinator(AirGradientCoordinator[Measures]):
 class AirGradientConfigCoordinator(AirGradientCoordinator[Config]):
     """Class to manage fetching AirGradient data."""
 
-    _update_interval = timedelta(minutes=5)
-
     async def _update_data(self) -> Config:
         return await self.client.get_config()
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/airgradient",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "requirements": ["airgradient==0.6.1"],
+  "requirements": ["airgradient==0.7.1"],
   "zeroconf": ["_airgradient._tcp.local."]
 }
@@ -156,7 +156,8 @@ class AirtouchAC(CoordinatorEntity, ClimateEntity):
             raise ValueError(f"Unsupported HVAC mode: {hvac_mode}")
 
         if hvac_mode == HVACMode.OFF:
-            return await self.async_turn_off()
+            await self.async_turn_off()
+            return
         await self._airtouch.SetCoolingModeForAc(
             self._ac_number, HA_STATE_TO_AT[hvac_mode]
         )
@@ -262,7 +263,8 @@ class AirtouchGroup(CoordinatorEntity, ClimateEntity):
             raise ValueError(f"Unsupported HVAC mode: {hvac_mode}")
 
         if hvac_mode == HVACMode.OFF:
-            return await self.async_turn_off()
+            await self.async_turn_off()
+            return
         if self.hvac_mode == HVACMode.OFF:
             await self.async_turn_on()
         self._unit = self._airtouch.GetGroups()[self._group_number]
@@ -11,7 +11,7 @@ from homeassistant.exceptions import ConfigEntryNotReady
 
 from .const import DOMAIN
 
-PLATFORMS: list[Platform] = [Platform.CLIMATE]
+PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.COVER]
 
 type Airtouch5ConfigEntry = ConfigEntry[Airtouch5SimpleClient]
 
@@ -121,6 +121,7 @@ class Airtouch5ClimateEntity(ClimateEntity, Airtouch5Entity):
     """Base class for Airtouch5 Climate Entities."""
 
     _attr_temperature_unit = UnitOfTemperature.CELSIUS
+    _attr_translation_key = DOMAIN
     _attr_target_temperature_step = 1
     _attr_name = None
     _enable_turn_on_off_backwards_compatibility = False
homeassistant/components/airtouch5/cover.py (new file, 134 lines)
@@ -0,0 +1,134 @@
+"""Representation of the Damper for AirTouch 5 Devices."""
+
+import logging
+from typing import Any
+
+from airtouch5py.airtouch5_simple_client import Airtouch5SimpleClient
+from airtouch5py.packets.zone_control import (
+    ZoneControlZone,
+    ZoneSettingPower,
+    ZoneSettingValue,
+)
+from airtouch5py.packets.zone_name import ZoneName
+from airtouch5py.packets.zone_status import ZoneStatusZone
+
+from homeassistant.components.cover import (
+    ATTR_POSITION,
+    CoverDeviceClass,
+    CoverEntity,
+    CoverEntityFeature,
+)
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers.device_registry import DeviceInfo
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from . import Airtouch5ConfigEntry
+from .const import DOMAIN
+from .entity import Airtouch5Entity
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: Airtouch5ConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up the Airtouch 5 Cover entities."""
+    client = config_entry.runtime_data
+
+    # Each zone has a cover for its open percentage
+    async_add_entities(
+        Airtouch5ZoneOpenPercentage(
+            client, zone, client.latest_zone_status[zone.zone_number].has_sensor
+        )
+        for zone in client.zones
+    )
+
+
+class Airtouch5ZoneOpenPercentage(CoverEntity, Airtouch5Entity):
+    """How open the damper is in each zone."""
+
+    _attr_device_class = CoverDeviceClass.DAMPER
+    _attr_translation_key = "damper"
+
+    # Zones with temperature sensors shouldn't be manually controlled.
+    # We allow it but warn the user in the integration documentation.
+    _attr_supported_features = (
+        CoverEntityFeature.SET_POSITION
+        | CoverEntityFeature.OPEN
+        | CoverEntityFeature.CLOSE
+    )
+
+    def __init__(
+        self, client: Airtouch5SimpleClient, zone_name: ZoneName, has_sensor: bool
+    ) -> None:
+        """Initialise the Cover Entity."""
+        super().__init__(client)
+        self._zone_name = zone_name
+
+        self._attr_unique_id = f"zone_{zone_name.zone_number}_open_percentage"
+        self._attr_device_info = DeviceInfo(
+            identifiers={(DOMAIN, f"zone_{zone_name.zone_number}")},
+            name=zone_name.zone_name,
+            manufacturer="Polyaire",
+            model="AirTouch 5",
+        )
+
+    @callback
+    def _async_update_attrs(self, data: dict[int, ZoneStatusZone]) -> None:
+        if self._zone_name.zone_number not in data:
+            return
+        status = data[self._zone_name.zone_number]
+
+        self._attr_current_cover_position = int(status.open_percentage * 100)
+        if status.open_percentage == 0:
+            self._attr_is_closed = True
+        else:
+            self._attr_is_closed = False
+        self.async_write_ha_state()
+
+    async def async_added_to_hass(self) -> None:
+        """Add data updated listener after this object has been initialized."""
+        await super().async_added_to_hass()
+        self._client.zone_status_callbacks.append(self._async_update_attrs)
+        self._async_update_attrs(self._client.latest_zone_status)
+
+    async def async_will_remove_from_hass(self) -> None:
+        """Remove data updated listener after this object has been initialized."""
+        await super().async_will_remove_from_hass()
+        self._client.zone_status_callbacks.remove(self._async_update_attrs)
+
+    async def async_open_cover(self, **kwargs: Any) -> None:
+        """Open the damper."""
+        await self._set_cover_position(100)
+
+    async def async_close_cover(self, **kwargs: Any) -> None:
+        """Close damper."""
+        await self._set_cover_position(0)
+
+    async def async_set_cover_position(self, **kwargs: Any) -> None:
+        """Update the damper to a specific position."""
+
+        if (position := kwargs.get(ATTR_POSITION)) is None:
+            _LOGGER.debug("Argument `position` is missing in set_cover_position")
+            return
+        await self._set_cover_position(position)
+
+    async def _set_cover_position(self, position_percent: float) -> None:
+        power: ZoneSettingPower
+
+        if position_percent == 0:
+            power = ZoneSettingPower.SET_TO_OFF
+        else:
+            power = ZoneSettingPower.SET_TO_ON
+
+        zcz = ZoneControlZone(
+            self._zone_name.zone_number,
+            ZoneSettingValue.SET_OPEN_PERCENTAGE,
+            power,
+            position_percent / 100.0,
+        )
+
+        packet = self._client.data_packet_factory.zone_control([zcz])
+        await self._client.send_packet(packet)
@@ -6,15 +6,12 @@ from airtouch5py.airtouch5_simple_client import Airtouch5SimpleClient
 from homeassistant.core import callback
 from homeassistant.helpers.entity import Entity
 
-from .const import DOMAIN
-
 
 class Airtouch5Entity(Entity):
     """Base class for Airtouch5 entities."""
 
     _attr_should_poll = False
     _attr_has_entity_name = True
-    _attr_translation_key = DOMAIN
 
     def __init__(self, client: Airtouch5SimpleClient) -> None:
         """Initialise the Entity."""
@@ -27,6 +27,11 @@
         }
       }
     }
+    },
+    "cover": {
+      "damper": {
+        "name": "[%key:component::cover::entity_component::damper::name%]"
+      }
     }
   }
 }
@@ -82,33 +82,54 @@ async def async_setup_entry(
     """Add Airzone binary sensors from a config_entry."""
     coordinator = entry.runtime_data
 
-    binary_sensors: list[AirzoneBinarySensor] = [
-        AirzoneSystemBinarySensor(
-            coordinator,
-            description,
-            entry,
-            system_id,
-            system_data,
-        )
-        for system_id, system_data in coordinator.data[AZD_SYSTEMS].items()
-        for description in SYSTEM_BINARY_SENSOR_TYPES
-        if description.key in system_data
-    ]
-
-    binary_sensors.extend(
-        AirzoneZoneBinarySensor(
-            coordinator,
-            description,
-            entry,
-            system_zone_id,
-            zone_data,
-        )
-        for system_zone_id, zone_data in coordinator.data[AZD_ZONES].items()
-        for description in ZONE_BINARY_SENSOR_TYPES
-        if description.key in zone_data
-    )
-
-    async_add_entities(binary_sensors)
+    added_systems: set[str] = set()
+    added_zones: set[str] = set()
+
+    def _async_entity_listener() -> None:
+        """Handle additions of binary sensors."""
+
+        entities: list[AirzoneBinarySensor] = []
+
+        systems_data = coordinator.data.get(AZD_SYSTEMS, {})
+        received_systems = set(systems_data)
+        new_systems = received_systems - added_systems
+        if new_systems:
+            entities.extend(
+                AirzoneSystemBinarySensor(
+                    coordinator,
+                    description,
+                    entry,
+                    system_id,
+                    systems_data.get(system_id),
+                )
+                for system_id in new_systems
+                for description in SYSTEM_BINARY_SENSOR_TYPES
+                if description.key in systems_data.get(system_id)
+            )
+            added_systems.update(new_systems)
+
+        zones_data = coordinator.data.get(AZD_ZONES, {})
+        received_zones = set(zones_data)
+        new_zones = received_zones - added_zones
+        if new_zones:
+            entities.extend(
+                AirzoneZoneBinarySensor(
+                    coordinator,
+                    description,
+                    entry,
+                    system_zone_id,
+                    zones_data.get(system_zone_id),
+                )
+                for system_zone_id in new_zones
+                for description in ZONE_BINARY_SENSOR_TYPES
+                if description.key in zones_data.get(system_zone_id)
+            )
+            added_zones.update(new_zones)
+
+        async_add_entities(entities)
+
+    entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener))
+    _async_entity_listener()
 
 
 class AirzoneBinarySensor(AirzoneEntity, BinarySensorEntity):
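The Airzone platform hunks in this commit (binary sensor above, and the climate, select, and sensor hunks that follow) all switch to the same pattern: keep a set of already-added IDs, register a coordinator listener, and on every refresh create entities only for IDs not seen before. A stripped-down, framework-free sketch of that bookkeeping follows; `FakeCoordinator` and its listener API are illustrative stand-ins for the real coordinator.

from collections.abc import Callable


class FakeCoordinator:
    """Illustrative stand-in: holds data keyed by zone id and notifies listeners."""

    def __init__(self) -> None:
        self.data: dict[str, dict] = {}
        self._listeners: list[Callable[[], None]] = []

    def async_add_listener(self, listener: Callable[[], None]) -> Callable[[], None]:
        self._listeners.append(listener)
        return lambda: self._listeners.remove(listener)

    def set_data(self, data: dict[str, dict]) -> None:
        self.data = data
        for listener in list(self._listeners):
            listener()


def track_new_zones(
    coordinator: FakeCoordinator, add_entities: Callable[[list[str]], None]
) -> None:
    """Add an entity only for zones that have not been seen before."""
    added_zones: set[str] = set()

    def _listener() -> None:
        new_zones = set(coordinator.data) - added_zones
        if new_zones:
            add_entities(sorted(new_zones))
            added_zones.update(new_zones)

    coordinator.async_add_listener(_listener)
    _listener()  # handle zones already present at setup time


created: list[str] = []
coordinator = FakeCoordinator()
coordinator.data = {"zone1": {}}
track_new_zones(coordinator, created.extend)
coordinator.set_data({"zone1": {}, "zone2": {}})  # only zone2 is added the second time
assert created == ["zone1", "zone2"]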
@@ -102,17 +102,31 @@ async def async_setup_entry(
     entry: AirzoneConfigEntry,
     async_add_entities: AddEntitiesCallback,
 ) -> None:
-    """Add Airzone sensors from a config_entry."""
+    """Add Airzone climate from a config_entry."""
     coordinator = entry.runtime_data
-    async_add_entities(
-        AirzoneClimate(
-            coordinator,
-            entry,
-            system_zone_id,
-            zone_data,
-        )
-        for system_zone_id, zone_data in coordinator.data[AZD_ZONES].items()
-    )
+
+    added_zones: set[str] = set()
+
+    def _async_entity_listener() -> None:
+        """Handle additions of climate."""
+
+        zones_data = coordinator.data.get(AZD_ZONES, {})
+        received_zones = set(zones_data)
+        new_zones = received_zones - added_zones
+        if new_zones:
+            async_add_entities(
+                AirzoneClimate(
+                    coordinator,
+                    entry,
+                    system_zone_id,
+                    zones_data.get(system_zone_id),
+                )
+                for system_zone_id in new_zones
+            )
+            added_zones.update(new_zones)
+
+    entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener))
+    _async_entity_listener()
 
 
 class AirzoneClimate(AirzoneZoneEntity, ClimateEntity):
@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airzone",
   "iot_class": "local_polling",
   "loggers": ["aioairzone"],
-  "requirements": ["aioairzone==0.7.7"]
+  "requirements": ["aioairzone==0.8.1"]
 }
@@ -83,21 +83,34 @@ async def async_setup_entry(
     entry: AirzoneConfigEntry,
     async_add_entities: AddEntitiesCallback,
 ) -> None:
-    """Add Airzone sensors from a config_entry."""
+    """Add Airzone select from a config_entry."""
     coordinator = entry.runtime_data
 
-    async_add_entities(
-        AirzoneZoneSelect(
-            coordinator,
-            description,
-            entry,
-            system_zone_id,
-            zone_data,
-        )
-        for description in ZONE_SELECT_TYPES
-        for system_zone_id, zone_data in coordinator.data[AZD_ZONES].items()
-        if description.key in zone_data
-    )
+    added_zones: set[str] = set()
+
+    def _async_entity_listener() -> None:
+        """Handle additions of select."""
+
+        zones_data = coordinator.data.get(AZD_ZONES, {})
+        received_zones = set(zones_data)
+        new_zones = received_zones - added_zones
+        if new_zones:
+            async_add_entities(
+                AirzoneZoneSelect(
+                    coordinator,
+                    description,
+                    entry,
+                    system_zone_id,
+                    zones_data.get(system_zone_id),
+                )
+                for system_zone_id in new_zones
+                for description in ZONE_SELECT_TYPES
+                if description.key in zones_data.get(system_zone_id)
+            )
+            added_zones.update(new_zones)
+
+    entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener))
+    _async_entity_listener()
 
 
 class AirzoneBaseSelect(AirzoneEntity, SelectEntity):
@@ -85,21 +85,37 @@ async def async_setup_entry(
     """Add Airzone sensors from a config_entry."""
     coordinator = entry.runtime_data
 
-    sensors: list[AirzoneSensor] = [
-        AirzoneZoneSensor(
-            coordinator,
-            description,
-            entry,
-            system_zone_id,
-            zone_data,
-        )
-        for system_zone_id, zone_data in coordinator.data[AZD_ZONES].items()
-        for description in ZONE_SENSOR_TYPES
-        if description.key in zone_data
-    ]
+    added_zones: set[str] = set()
+
+    def _async_entity_listener() -> None:
+        """Handle additions of sensors."""
+
+        entities: list[AirzoneSensor] = []
+
+        zones_data = coordinator.data.get(AZD_ZONES, {})
+        received_zones = set(zones_data)
+        new_zones = received_zones - added_zones
+        if new_zones:
+            entities.extend(
+                AirzoneZoneSensor(
+                    coordinator,
+                    description,
+                    entry,
+                    system_zone_id,
+                    zones_data.get(system_zone_id),
+                )
+                for system_zone_id in new_zones
+                for description in ZONE_SENSOR_TYPES
+                if description.key in zones_data.get(system_zone_id)
+            )
+            added_zones.update(new_zones)
+
+        async_add_entities(entities)
+
+    entities: list[AirzoneSensor] = []
 
     if AZD_HOT_WATER in coordinator.data:
-        sensors.extend(
+        entities.extend(
             AirzoneHotWaterSensor(
                 coordinator,
                 description,
@@ -110,7 +126,7 @@ async def async_setup_entry(
         )
 
     if AZD_WEBSERVER in coordinator.data:
-        sensors.extend(
+        entities.extend(
             AirzoneWebServerSensor(
                 coordinator,
                 description,
@@ -120,7 +136,10 @@ async def async_setup_entry(
             if description.key in coordinator.data[AZD_WEBSERVER]
         )
 
-    async_add_entities(sensors)
+    async_add_entities(entities)
+
+    entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener))
+    _async_entity_listener()
 
 
 class AirzoneSensor(AirzoneEntity, SensorEntity):
@@ -61,7 +61,7 @@ async def async_setup_entry(
     entry: AirzoneConfigEntry,
     async_add_entities: AddEntitiesCallback,
 ) -> None:
-    """Add Airzone sensors from a config_entry."""
+    """Add Airzone Water Heater from a config_entry."""
     coordinator = entry.runtime_data
     if AZD_HOT_WATER in coordinator.data:
         async_add_entities([AirzoneWaterHeater(coordinator, entry)])
@ -14,6 +14,7 @@ from aioairzone_cloud.const import (
|
|||||||
AZD_FLOOR_DEMAND,
|
AZD_FLOOR_DEMAND,
|
||||||
AZD_PROBLEMS,
|
AZD_PROBLEMS,
|
||||||
AZD_SYSTEMS,
|
AZD_SYSTEMS,
|
||||||
|
AZD_THERMOSTAT_BATTERY_LOW,
|
||||||
AZD_WARNINGS,
|
AZD_WARNINGS,
|
||||||
AZD_ZONES,
|
AZD_ZONES,
|
||||||
)
|
)
|
||||||
@ -88,6 +89,10 @@ ZONE_BINARY_SENSOR_TYPES: Final[tuple[AirzoneBinarySensorEntityDescription, ...]
|
|||||||
key=AZD_AQ_ACTIVE,
|
key=AZD_AQ_ACTIVE,
|
||||||
translation_key="air_quality_active",
|
translation_key="air_quality_active",
|
||||||
),
|
),
|
||||||
|
AirzoneBinarySensorEntityDescription(
|
||||||
|
device_class=BinarySensorDeviceClass.BATTERY,
|
||||||
|
key=AZD_THERMOSTAT_BATTERY_LOW,
|
||||||
|
),
|
||||||
AirzoneBinarySensorEntityDescription(
|
AirzoneBinarySensorEntityDescription(
|
||||||
device_class=BinarySensorDeviceClass.RUNNING,
|
device_class=BinarySensorDeviceClass.RUNNING,
|
||||||
key=AZD_FLOOR_DEMAND,
|
key=AZD_FLOOR_DEMAND,
|
||||||
|
@ -13,9 +13,12 @@ from aioairzone_cloud.const import (
|
|||||||
AZD_GROUPS,
|
AZD_GROUPS,
|
||||||
AZD_HOT_WATERS,
|
AZD_HOT_WATERS,
|
||||||
AZD_INSTALLATIONS,
|
AZD_INSTALLATIONS,
|
||||||
|
AZD_MODEL,
|
||||||
AZD_NAME,
|
AZD_NAME,
|
||||||
AZD_SYSTEM_ID,
|
AZD_SYSTEM_ID,
|
||||||
AZD_SYSTEMS,
|
AZD_SYSTEMS,
|
||||||
|
AZD_THERMOSTAT_FW,
|
||||||
|
AZD_THERMOSTAT_MODEL,
|
||||||
AZD_WEBSERVER,
|
AZD_WEBSERVER,
|
||||||
AZD_WEBSERVERS,
|
AZD_WEBSERVERS,
|
||||||
AZD_ZONES,
|
AZD_ZONES,
|
||||||
@ -69,6 +72,7 @@ class AirzoneAidooEntity(AirzoneEntity):
|
|||||||
self._attr_device_info = DeviceInfo(
|
self._attr_device_info = DeviceInfo(
|
||||||
identifiers={(DOMAIN, aidoo_id)},
|
identifiers={(DOMAIN, aidoo_id)},
|
||||||
manufacturer=MANUFACTURER,
|
manufacturer=MANUFACTURER,
|
||||||
|
model=aidoo_data[AZD_MODEL],
|
||||||
name=aidoo_data[AZD_NAME],
|
name=aidoo_data[AZD_NAME],
|
||||||
via_device=(DOMAIN, aidoo_data[AZD_WEBSERVER]),
|
via_device=(DOMAIN, aidoo_data[AZD_WEBSERVER]),
|
||||||
)
|
)
|
||||||
@ -111,6 +115,7 @@ class AirzoneGroupEntity(AirzoneEntity):
|
|||||||
|
|
||||||
self._attr_device_info = DeviceInfo(
|
self._attr_device_info = DeviceInfo(
|
||||||
identifiers={(DOMAIN, group_id)},
|
identifiers={(DOMAIN, group_id)},
|
||||||
|
model="Group",
|
||||||
manufacturer=MANUFACTURER,
|
manufacturer=MANUFACTURER,
|
||||||
name=group_data[AZD_NAME],
|
name=group_data[AZD_NAME],
|
||||||
)
|
)
|
||||||
@ -154,6 +159,7 @@ class AirzoneHotWaterEntity(AirzoneEntity):
|
|||||||
self._attr_device_info = DeviceInfo(
|
self._attr_device_info = DeviceInfo(
|
||||||
identifiers={(DOMAIN, dhw_id)},
|
identifiers={(DOMAIN, dhw_id)},
|
||||||
manufacturer=MANUFACTURER,
|
manufacturer=MANUFACTURER,
|
||||||
|
model="Hot Water",
|
||||||
name=dhw_data[AZD_NAME],
|
name=dhw_data[AZD_NAME],
|
||||||
via_device=(DOMAIN, dhw_data[AZD_WEBSERVER]),
|
via_device=(DOMAIN, dhw_data[AZD_WEBSERVER]),
|
||||||
)
|
)
|
||||||
@ -195,6 +201,7 @@ class AirzoneInstallationEntity(AirzoneEntity):
|
|||||||
self._attr_device_info = DeviceInfo(
|
self._attr_device_info = DeviceInfo(
|
||||||
identifiers={(DOMAIN, inst_id)},
|
identifiers={(DOMAIN, inst_id)},
|
||||||
manufacturer=MANUFACTURER,
|
manufacturer=MANUFACTURER,
|
||||||
|
model="Installation",
|
||||||
name=inst_data[AZD_NAME],
|
name=inst_data[AZD_NAME],
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -240,9 +247,11 @@ class AirzoneSystemEntity(AirzoneEntity):
|
|||||||
|
|
||||||
self._attr_device_info = DeviceInfo(
|
self._attr_device_info = DeviceInfo(
|
||||||
identifiers={(DOMAIN, system_id)},
|
identifiers={(DOMAIN, system_id)},
|
||||||
|
model=system_data.get(AZD_MODEL),
|
||||||
manufacturer=MANUFACTURER,
|
manufacturer=MANUFACTURER,
|
||||||
name=system_data[AZD_NAME],
|
name=system_data[AZD_NAME],
|
||||||
via_device=(DOMAIN, system_data[AZD_WEBSERVER]),
|
via_device=(DOMAIN, system_data[AZD_WEBSERVER]),
|
||||||
|
sw_version=system_data.get(AZD_FIRMWARE),
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_airzone_value(self, key: str) -> Any:
|
def get_airzone_value(self, key: str) -> Any:
|
||||||
@ -270,6 +279,7 @@ class AirzoneWebServerEntity(AirzoneEntity):
|
|||||||
self._attr_device_info = DeviceInfo(
|
self._attr_device_info = DeviceInfo(
|
||||||
connections={(dr.CONNECTION_NETWORK_MAC, ws_id)},
|
connections={(dr.CONNECTION_NETWORK_MAC, ws_id)},
|
||||||
identifiers={(DOMAIN, ws_id)},
|
identifiers={(DOMAIN, ws_id)},
|
||||||
|
model="WebServer",
|
||||||
manufacturer=MANUFACTURER,
|
manufacturer=MANUFACTURER,
|
||||||
name=ws_data[AZD_NAME],
|
name=ws_data[AZD_NAME],
|
||||||
sw_version=ws_data[AZD_FIRMWARE],
|
sw_version=ws_data[AZD_FIRMWARE],
|
||||||
@ -300,9 +310,11 @@ class AirzoneZoneEntity(AirzoneEntity):
|
|||||||
|
|
||||||
self._attr_device_info = DeviceInfo(
|
self._attr_device_info = DeviceInfo(
|
||||||
identifiers={(DOMAIN, zone_id)},
|
identifiers={(DOMAIN, zone_id)},
|
||||||
|
model=zone_data.get(AZD_THERMOSTAT_MODEL),
|
||||||
manufacturer=MANUFACTURER,
|
manufacturer=MANUFACTURER,
|
||||||
name=zone_data[AZD_NAME],
|
name=zone_data[AZD_NAME],
|
||||||
via_device=(DOMAIN, self.system_id),
|
via_device=(DOMAIN, self.system_id),
|
||||||
|
sw_version=zone_data.get(AZD_THERMOSTAT_FW),
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_airzone_value(self, key: str) -> Any:
|
def get_airzone_value(self, key: str) -> Any:
|
||||||
|
15
homeassistant/components/airzone_cloud/icons.json
Normal file
15
homeassistant/components/airzone_cloud/icons.json
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"entity": {
|
||||||
|
"sensor": {
|
||||||
|
"cpu_usage": {
|
||||||
|
"default": "mdi:cpu-32-bit"
|
||||||
|
},
|
||||||
|
"free_memory": {
|
||||||
|
"default": "mdi:memory"
|
||||||
|
},
|
||||||
|
"thermostat_coverage": {
|
||||||
|
"default": "mdi:signal"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -6,5 +6,5 @@
|
|||||||
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
|
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
|
||||||
"iot_class": "cloud_push",
|
"iot_class": "cloud_push",
|
||||||
"loggers": ["aioairzone_cloud"],
|
"loggers": ["aioairzone_cloud"],
|
||||||
"requirements": ["aioairzone-cloud==0.5.3"]
|
"requirements": ["aioairzone-cloud==0.6.1"]
|
||||||
}
|
}
|
||||||
|
@ -10,8 +10,12 @@ from aioairzone_cloud.const import (
|
|||||||
AZD_AQ_PM_1,
|
AZD_AQ_PM_1,
|
||||||
AZD_AQ_PM_2P5,
|
AZD_AQ_PM_2P5,
|
||||||
AZD_AQ_PM_10,
|
AZD_AQ_PM_10,
|
||||||
|
AZD_CPU_USAGE,
|
||||||
AZD_HUMIDITY,
|
AZD_HUMIDITY,
|
||||||
|
AZD_MEMORY_FREE,
|
||||||
AZD_TEMP,
|
AZD_TEMP,
|
||||||
|
AZD_THERMOSTAT_BATTERY,
|
||||||
|
AZD_THERMOSTAT_COVERAGE,
|
||||||
AZD_WEBSERVERS,
|
AZD_WEBSERVERS,
|
||||||
AZD_WIFI_RSSI,
|
AZD_WIFI_RSSI,
|
||||||
AZD_ZONES,
|
AZD_ZONES,
|
||||||
@ -28,6 +32,7 @@ from homeassistant.const import (
|
|||||||
PERCENTAGE,
|
PERCENTAGE,
|
||||||
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
||||||
EntityCategory,
|
EntityCategory,
|
||||||
|
UnitOfInformation,
|
||||||
UnitOfTemperature,
|
UnitOfTemperature,
|
||||||
)
|
)
|
||||||
from homeassistant.core import HomeAssistant, callback
|
from homeassistant.core import HomeAssistant, callback
|
||||||
@ -52,6 +57,22 @@ AIDOO_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
|
|||||||
)
|
)
|
||||||
|
|
||||||
WEBSERVER_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
|
WEBSERVER_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
|
||||||
|
SensorEntityDescription(
|
||||||
|
entity_category=EntityCategory.DIAGNOSTIC,
|
||||||
|
entity_registry_enabled_default=False,
|
||||||
|
key=AZD_CPU_USAGE,
|
||||||
|
native_unit_of_measurement=PERCENTAGE,
|
||||||
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
|
translation_key="cpu_usage",
|
||||||
|
),
|
||||||
|
SensorEntityDescription(
|
||||||
|
entity_category=EntityCategory.DIAGNOSTIC,
|
||||||
|
entity_registry_enabled_default=False,
|
||||||
|
key=AZD_MEMORY_FREE,
|
||||||
|
native_unit_of_measurement=UnitOfInformation.BYTES,
|
||||||
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
|
translation_key="free_memory",
|
||||||
|
),
|
||||||
SensorEntityDescription(
|
SensorEntityDescription(
|
||||||
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
|
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
|
||||||
entity_category=EntityCategory.DIAGNOSTIC,
|
entity_category=EntityCategory.DIAGNOSTIC,
|
||||||
@ -98,6 +119,20 @@ ZONE_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
|
|||||||
native_unit_of_measurement=PERCENTAGE,
|
native_unit_of_measurement=PERCENTAGE,
|
||||||
state_class=SensorStateClass.MEASUREMENT,
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
),
|
),
|
||||||
|
SensorEntityDescription(
|
||||||
|
device_class=SensorDeviceClass.BATTERY,
|
||||||
|
key=AZD_THERMOSTAT_BATTERY,
|
||||||
|
native_unit_of_measurement=PERCENTAGE,
|
||||||
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
|
),
|
||||||
|
SensorEntityDescription(
|
||||||
|
entity_category=EntityCategory.DIAGNOSTIC,
|
||||||
|
entity_registry_enabled_default=False,
|
||||||
|
key=AZD_THERMOSTAT_COVERAGE,
|
||||||
|
native_unit_of_measurement=PERCENTAGE,
|
||||||
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
|
translation_key="thermostat_coverage",
|
||||||
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@ -37,6 +37,17 @@
|
|||||||
"auto": "Auto"
|
"auto": "Auto"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
"sensor": {
|
||||||
|
"cpu_usage": {
|
||||||
|
"name": "CPU usage"
|
||||||
|
},
|
||||||
|
"free_memory": {
|
||||||
|
"name": "Free memory"
|
||||||
|
},
|
||||||
|
"thermostat_coverage": {
|
||||||
|
"name": "Signal percentage"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -2,11 +2,10 @@
|
|||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections.abc import Generator
|
||||||
import logging
|
import logging
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from typing_extensions import Generator
|
|
||||||
|
|
||||||
from homeassistant.components import (
|
from homeassistant.components import (
|
||||||
button,
|
button,
|
||||||
climate,
|
climate,
|
||||||
@ -19,6 +18,7 @@ from homeassistant.components import (
|
|||||||
light,
|
light,
|
||||||
media_player,
|
media_player,
|
||||||
number,
|
number,
|
||||||
|
remote,
|
||||||
timer,
|
timer,
|
||||||
vacuum,
|
vacuum,
|
||||||
valve,
|
valve,
|
||||||
@ -439,6 +439,8 @@ class AlexaPowerController(AlexaCapability):
|
|||||||
is_on = self.entity.state == fan.STATE_ON
|
is_on = self.entity.state == fan.STATE_ON
|
||||||
elif self.entity.domain == humidifier.DOMAIN:
|
elif self.entity.domain == humidifier.DOMAIN:
|
||||||
is_on = self.entity.state == humidifier.STATE_ON
|
is_on = self.entity.state == humidifier.STATE_ON
|
||||||
|
elif self.entity.domain == remote.DOMAIN:
|
||||||
|
is_on = self.entity.state not in (STATE_OFF, STATE_UNKNOWN)
|
||||||
elif self.entity.domain == vacuum.DOMAIN:
|
elif self.entity.domain == vacuum.DOMAIN:
|
||||||
is_on = self.entity.state == vacuum.STATE_CLEANING
|
is_on = self.entity.state == vacuum.STATE_CLEANING
|
||||||
elif self.entity.domain == timer.DOMAIN:
|
elif self.entity.domain == timer.DOMAIN:
|
||||||
@@ -1436,6 +1438,12 @@ class AlexaModeController(AlexaCapability):
             if mode in modes:
                 return f"{humidifier.ATTR_MODE}.{mode}"

+        # Remote Activity
+        if self.instance == f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}":
+            activity = self.entity.attributes.get(remote.ATTR_CURRENT_ACTIVITY, None)
+            if activity in self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST, []):
+                return f"{remote.ATTR_ACTIVITY}.{activity}"
+
         # Water heater operation mode
         if self.instance == f"{water_heater.DOMAIN}.{water_heater.ATTR_OPERATION_MODE}":
             operation_mode = self.entity.attributes.get(
@@ -1550,6 +1558,24 @@ class AlexaModeController(AlexaCapability):
             )
             return self._resource.serialize_capability_resources()

+        # Remote Resource
+        if self.instance == f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}":
+            # Use the mode controller for a remote because the input controller
+            # only allows a preset of names as an input.
+            self._resource = AlexaModeResource([AlexaGlobalCatalog.SETTING_MODE], False)
+            activities = self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST) or []
+            for activity in activities:
+                self._resource.add_mode(
+                    f"{remote.ATTR_ACTIVITY}.{activity}", [activity]
+                )
+            # Remotes with a single activity completely break Alexa discovery, add a
+            # fake activity to the mode controller (see issue #53832).
+            if len(activities) == 1:
+                self._resource.add_mode(
+                    f"{remote.ATTR_ACTIVITY}.{PRESET_MODE_NA}", [PRESET_MODE_NA]
+                )
+            return self._resource.serialize_capability_resources()
+
         # Cover Position Resources
         if self.instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}":
             self._resource = AlexaModeResource(
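As an aside, and not part of the diff above: a minimal sketch of the mode values the remote ModeController ends up exposing, including the placeholder padding for single-activity remotes. The activity names are made up for illustration.

# Illustrative sketch, not part of the diff. Activity names are hypothetical.
ATTR_ACTIVITY = "activity"
PRESET_MODE_NA = "-"

def activity_modes(activity_list):
    """Mode values a remote ModeController would expose."""
    modes = [f"{ATTR_ACTIVITY}.{activity}" for activity in activity_list]
    if len(activity_list) == 1:
        # A single mode breaks Alexa discovery, so a placeholder is appended.
        modes.append(f"{ATTR_ACTIVITY}.{PRESET_MODE_NA}")
    return modes

print(activity_modes(["Watch TV"]))            # ['activity.Watch TV', 'activity.-']
print(activity_modes(["Watch TV", "Listen"]))  # ['activity.Watch TV', 'activity.Listen']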
@@ -88,7 +88,7 @@ API_THERMOSTAT_MODES_CUSTOM = {
 API_THERMOSTAT_PRESETS = {climate.PRESET_ECO: "ECO"}

 # AlexaModeController does not like a single mode for the fan preset or humidifier mode,
-# we add PRESET_MODE_NA if a fan / humidifier has only one preset_mode
+# we add PRESET_MODE_NA if a fan / humidifier / remote has only one preset_mode
 PRESET_MODE_NA = "-"

 STORAGE_ACCESS_TOKEN = "access_token"
@@ -2,12 +2,10 @@

 from __future__ import annotations

-from collections.abc import Iterable
+from collections.abc import Generator, Iterable
 import logging
 from typing import TYPE_CHECKING, Any

-from typing_extensions import Generator
-
 from homeassistant.components import (
     alarm_control_panel,
     alert,
@@ -29,6 +27,7 @@ from homeassistant.components import (
     lock,
     media_player,
     number,
+    remote,
     scene,
     script,
     sensor,
@@ -198,6 +197,10 @@ class DisplayCategory:
     # Indicates a device that prints.
     PRINTER = "PRINTER"

+    # Indicates a device that supports stateless events,
+    # such as remote switches and smart buttons.
+    REMOTE = "REMOTE"
+
     # Indicates a network router.
     ROUTER = "ROUTER"

@@ -647,6 +650,24 @@ class FanCapabilities(AlexaEntity):
         yield Alexa(self.entity)


+@ENTITY_ADAPTERS.register(remote.DOMAIN)
+class RemoteCapabilities(AlexaEntity):
+    """Class to represent Remote capabilities."""
+
+    def default_display_categories(self) -> list[str]:
+        """Return the display categories for this entity."""
+        return [DisplayCategory.REMOTE]
+
+    def interfaces(self) -> Generator[AlexaCapability]:
+        """Yield the supported interfaces."""
+        yield AlexaPowerController(self.entity)
+        yield AlexaModeController(
+            self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}"
+        )
+        yield AlexaEndpointHealth(self.hass, self.entity)
+        yield Alexa(self.entity)
+
+
 @ENTITY_ADAPTERS.register(humidifier.DOMAIN)
 class HumidifierCapabilities(AlexaEntity):
     """Class to represent Humidifier capabilities."""
@@ -21,6 +21,7 @@ from homeassistant.components import (
     light,
     media_player,
     number,
+    remote,
     timer,
     vacuum,
     valve,
@@ -185,6 +186,8 @@ async def async_api_turn_on(
         service = fan.SERVICE_TURN_ON
     elif domain == humidifier.DOMAIN:
         service = humidifier.SERVICE_TURN_ON
+    elif domain == remote.DOMAIN:
+        service = remote.SERVICE_TURN_ON
     elif domain == vacuum.DOMAIN:
         supported = entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
         if (
@@ -234,6 +237,8 @@ async def async_api_turn_off(
         service = climate.SERVICE_TURN_OFF
     elif domain == fan.DOMAIN:
         service = fan.SERVICE_TURN_OFF
+    elif domain == remote.DOMAIN:
+        service = remote.SERVICE_TURN_OFF
     elif domain == humidifier.DOMAIN:
         service = humidifier.SERVICE_TURN_OFF
     elif domain == vacuum.DOMAIN:
@@ -1200,6 +1205,17 @@ async def async_api_set_mode(
             msg = f"Entity '{entity.entity_id}' does not support Mode '{mode}'"
             raise AlexaInvalidValueError(msg)

+    # Remote Activity
+    if instance == f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}":
+        activity = mode.split(".")[1]
+        activities: list[str] | None = entity.attributes.get(remote.ATTR_ACTIVITY_LIST)
+        if activity != PRESET_MODE_NA and activities and activity in activities:
+            service = remote.SERVICE_TURN_ON
+            data[remote.ATTR_ACTIVITY] = activity
+        else:
+            msg = f"Entity '{entity.entity_id}' does not support Mode '{mode}'"
+            raise AlexaInvalidValueError(msg)
+
     # Water heater operation mode
     elif instance == f"{water_heater.DOMAIN}.{water_heater.ATTR_OPERATION_MODE}":
         operation_mode = mode.split(".")[1]
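Not part of the diff: a small sketch of how an incoming SetMode value is split back into an activity before the remote.turn_on call above. "Watch TV" is a hypothetical activity name.

# Illustrative sketch, not part of the diff.
PRESET_MODE_NA = "-"
mode = "activity.Watch TV"
activities = ["Watch TV", "Listen to Music"]

activity = mode.split(".")[1]
if activity != PRESET_MODE_NA and activities and activity in activities:
    service_data = {"activity": activity}  # would be passed to remote.turn_on
print(activity)  # Watch TV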
@@ -5,5 +5,6 @@
   "codeowners": ["@home-assistant/cloud", "@ochlocracy", "@jbouwh"],
   "dependencies": ["http"],
   "documentation": "https://www.home-assistant.io/integrations/alexa",
+  "integration_type": "system",
   "iot_class": "cloud_push"
 }
@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/amazon_polly",
   "iot_class": "cloud_push",
   "loggers": ["boto3", "botocore", "s3transfer"],
-  "requirements": ["boto3==1.34.51"]
+  "requirements": ["boto3==1.34.131"]
 }
@@ -71,6 +71,18 @@ class AmberPriceSpikeBinarySensor(AmberPriceGridSensor):
         }


+class AmberDemandWindowBinarySensor(AmberPriceGridSensor):
+    """Sensor to show whether demand window is active."""
+
+    @property
+    def is_on(self) -> bool | None:
+        """Return true if the binary sensor is on."""
+        grid = self.coordinator.data["grid"]
+        if "demand_window" in grid:
+            return grid["demand_window"]  # type: ignore[no-any-return]
+        return None
+
+
 async def async_setup_entry(
     hass: HomeAssistant,
     entry: ConfigEntry,
@@ -83,6 +95,14 @@ async def async_setup_entry(
         key="price_spike",
         name=f"{entry.title} - Price Spike",
     )
-    async_add_entities(
-        [AmberPriceSpikeBinarySensor(coordinator, price_spike_description)]
+    demand_window_description = BinarySensorEntityDescription(
+        key="demand_window",
+        name=f"{entry.title} - Demand Window",
+        translation_key="demand_window",
+    )
+    async_add_entities(
+        [
+            AmberPriceSpikeBinarySensor(coordinator, price_spike_description),
+            AmberDemandWindowBinarySensor(coordinator, demand_window_description),
+        ]
     )
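Not part of the diff: a minimal sketch of the is_on lookup above, assuming the coordinator data layout built in the coordinator change that follows. The values are made up.

# Illustrative sketch, not part of the diff.
coordinator_data = {"grid": {"renewables": 32, "price_spike": "none", "demand_window": True}}

grid = coordinator_data["grid"]
is_on = grid["demand_window"] if "demand_window" in grid else None
print(is_on)  # True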
@@ -111,6 +111,9 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
         ]
         result["grid"]["renewables"] = round(general[0].renewables)
         result["grid"]["price_spike"] = general[0].spike_status.value
+        tariff_information = general[0].tariff_information
+        if tariff_information:
+            result["grid"]["demand_window"] = tariff_information.demand_window

         controlled_load = [
             interval for interval in current if is_controlled_load(interval)
@@ -13,6 +13,14 @@
     "renewables": {
       "default": "mdi:solar-power"
     }
+  },
+  "binary_sensor": {
+    "demand_window": {
+      "default": "mdi:meter-electric",
+      "state": {
+        "off": "mdi:meter-electric-outline"
+      }
+    }
   }
  }
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/amberelectric",
   "iot_class": "cloud_polling",
   "loggers": ["amberelectric"],
-  "requirements": ["amberelectric==1.1.0"]
+  "requirements": ["amberelectric==1.1.1"]
 }
@@ -8,6 +8,7 @@ from datetime import timedelta
 import logging
 from typing import TYPE_CHECKING, Any

+import aiohttp
 from aiohttp import web
 from amcrest import AmcrestError
 from haffmpeg.camera import CameraMjpeg
@@ -244,7 +245,9 @@ class AmcrestCam(Camera):
         websession = async_get_clientsession(self.hass)
         streaming_url = self._api.mjpeg_url(typeno=self._resolution)
         stream_coro = websession.get(
-            streaming_url, auth=self._token, timeout=CAMERA_WEB_SESSION_TIMEOUT
+            streaming_url,
+            auth=self._token,
+            timeout=aiohttp.ClientTimeout(total=CAMERA_WEB_SESSION_TIMEOUT),
         )

         return await async_aiohttp_proxy_web(self.hass, request, stream_coro)
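Not part of the diff: recent aiohttp versions expect a ClientTimeout object rather than a bare number for the timeout argument, which is what the change above switches to. A minimal sketch, with the constant value assumed for illustration:

# Illustrative sketch, not part of the diff. CAMERA_WEB_SESSION_TIMEOUT is assumed to be 10.
import aiohttp

CAMERA_WEB_SESSION_TIMEOUT = 10  # assumed value, for illustration only

timeout = aiohttp.ClientTimeout(total=CAMERA_WEB_SESSION_TIMEOUT)
print(timeout.total)  # 10
# websession.get(url, auth=..., timeout=timeout) then applies a 10 s overall limit.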
@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["python_homeassistant_analytics"],
-  "requirements": ["python-homeassistant-analytics==0.6.0"],
+  "requirements": ["python-homeassistant-analytics==0.7.0"],
   "single_config_entry": true
 }
@@ -101,7 +101,7 @@
     },
     "learn_sendevent": {
       "name": "Learn sendevent",
-      "description": "Translates a key press on a remote into ADB 'sendevent' commands. You must press one button on the remote within 8 seconds of calling this service."
+      "description": "Translates a key press on a remote into ADB 'sendevent' commands. You must press one button on the remote within 8 seconds of performing this action."
     }
   },
   "exceptions": {
@@ -16,8 +16,6 @@ from homeassistant.const import (
     CONF_USERNAME,
     EVENT_HOMEASSISTANT_STOP,
     EVENT_STATE_CHANGED,
-    STATE_UNAVAILABLE,
-    STATE_UNKNOWN,
 )
 from homeassistant.core import Event, EventStateChangedData, HomeAssistant
 import homeassistant.helpers.config_validation as cv
@@ -121,7 +119,7 @@ class KafkaManager:
         state = event.data["new_state"]
         if (
             state is None
-            or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
+            or state.state == ""
             or not self._entities_filter(state.entity_id)
         ):
             return None
@@ -141,7 +139,8 @@ class KafkaManager:

     async def write(self, event: Event[EventStateChangedData]) -> None:
         """Write a binary payload to Kafka."""
+        key = event.data["entity_id"].encode("utf-8")
         payload = self._encode_event(event)

         if payload:
-            await self._producer.send_and_wait(self._topic, payload)
+            await self._producer.send_and_wait(self._topic, payload, key)
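Not part of the diff: the third positional argument added above is the Kafka message key. Keying by entity_id keeps every state change of one entity in the same partition, so consumers read them in order. A tiny sketch with a hypothetical entity id:

# Illustrative sketch, not part of the diff.
event_data = {"entity_id": "sensor.kitchen_temperature"}  # hypothetical entity
key = event_data["entity_id"].encode("utf-8")
print(key)  # b'sensor.kitchen_temperature'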
@@ -4,3 +4,6 @@ from typing import Final

 DOMAIN: Final = "apcupsd"
 CONNECTION_TIMEOUT: int = 10
+
+# Field name of last self test retrieved from apcupsd.
+LASTSTEST: Final = "laststest"
@@ -13,6 +13,7 @@ from homeassistant.components.sensor import (
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     PERCENTAGE,
+    STATE_UNKNOWN,
     UnitOfApparentPower,
     UnitOfElectricCurrent,
     UnitOfElectricPotential,
@@ -25,7 +26,7 @@ from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from .const import DOMAIN
+from .const import DOMAIN, LASTSTEST
 from .coordinator import APCUPSdCoordinator

 PARALLEL_UPDATES = 0
@@ -156,8 +157,8 @@ SENSORS: dict[str, SensorEntityDescription] = {
         device_class=SensorDeviceClass.TEMPERATURE,
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    "laststest": SensorEntityDescription(
-        key="laststest",
+    LASTSTEST: SensorEntityDescription(
+        key=LASTSTEST,
         translation_key="last_self_test",
     ),
     "lastxfer": SensorEntityDescription(
@@ -417,7 +418,12 @@ async def async_setup_entry(
     available_resources: set[str] = {k.lower() for k, _ in coordinator.data.items()}

     entities = []
-    for resource in available_resources:
+
+    # "laststest" is a special sensor that only appears when the APC UPS daemon has done a
+    # periodical (or manual) self test since last daemon restart. It might not be available
+    # when we set up the integration, and we do not know if it would ever be available. Here we
+    # add it anyway and mark it as unknown initially.
+    for resource in available_resources | {LASTSTEST}:
         if resource not in SENSORS:
             _LOGGER.warning("Invalid resource from APCUPSd: %s", resource.upper())
             continue
@@ -473,6 +479,14 @@ class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity):
     def _update_attrs(self) -> None:
         """Update sensor attributes based on coordinator data."""
         key = self.entity_description.key.upper()
+        # For most sensors the key will always be available for each refresh. However, some sensors
+        # (e.g., "laststest") will only appear after certain event occurs (e.g., a self test is
+        # performed) and may disappear again after certain event. So we mark the state as "unknown"
+        # when it becomes unknown after such events.
+        if key not in self.coordinator.data:
+            self._attr_native_value = STATE_UNKNOWN
+            return
+
         self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key])
         if not self.native_unit_of_measurement:
             self._attr_native_unit_of_measurement = inferred_unit
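Not part of the diff: the set union above is what forces the "laststest" sensor to exist even before the UPS has reported a self test; a missing key then maps to an unknown state instead of the entity being dropped. A tiny sketch with hypothetical resources:

# Illustrative sketch, not part of the diff.
LASTSTEST = "laststest"
available_resources = {"bcharge", "linev"}  # hypothetical resources

for resource in sorted(available_resources | {LASTSTEST}):
    print(resource)
# bcharge
# laststest
# linev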
@@ -45,7 +45,7 @@ from homeassistant.exceptions import (
     TemplateError,
     Unauthorized,
 )
-from homeassistant.helpers import config_validation as cv, template
+from homeassistant.helpers import config_validation as cv, recorder, template
 from homeassistant.helpers.json import json_dumps, json_fragment
 from homeassistant.helpers.service import async_get_all_descriptions
 from homeassistant.helpers.typing import ConfigType
@@ -119,7 +119,10 @@ class APICoreStateView(HomeAssistantView):
         to check if Home Assistant is running.
         """
         hass = request.app[KEY_HASS]
-        return self.json({"state": hass.state.value})
+        migration = recorder.async_migration_in_progress(hass)
+        live = recorder.async_migration_is_live(hass)
+        recorder_state = {"migration_in_progress": migration, "migration_is_live": live}
+        return self.json({"state": hass.state.value, "recorder_state": recorder_state})


 class APIEventStream(HomeAssistantView):
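Not part of the diff: a sketch of the JSON shape GET /api/core/state returns after this change, with the recorder migration flags included. Values are illustrative only.

# Illustrative sketch, not part of the diff.
state = "RUNNING"
recorder_state = {"migration_in_progress": False, "migration_is_live": False}
print({"state": state, "recorder_state": recorder_state})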
@@ -387,6 +390,27 @@ class APIDomainServicesView(HomeAssistantView):
             )

         context = self.context(request)
+        if not hass.services.has_service(domain, service):
+            raise HTTPBadRequest from ServiceNotFound(domain, service)
+
+        if response_requested := "return_response" in request.query:
+            if (
+                hass.services.supports_response(domain, service)
+                is ha.SupportsResponse.NONE
+            ):
+                return self.json_message(
+                    "Service does not support responses. Remove return_response from request.",
+                    HTTPStatus.BAD_REQUEST,
+                )
+        elif (
+            hass.services.supports_response(domain, service) is ha.SupportsResponse.ONLY
+        ):
+            return self.json_message(
+                "Service call requires responses but caller did not ask for responses. "
+                "Add ?return_response to query parameters.",
+                HTTPStatus.BAD_REQUEST,
+            )
+
         changed_states: list[json_fragment] = []

         @ha.callback
@@ -403,13 +427,14 @@ class APIDomainServicesView(HomeAssistantView):

         try:
             # shield the service call from cancellation on connection drop
-            await shield(
+            response = await shield(
                 hass.services.async_call(
                     domain,
                     service,
                     data,  # type: ignore[arg-type]
                     blocking=True,
                     context=context,
+                    return_response=response_requested,
                 )
             )
         except (vol.Invalid, ServiceNotFound) as ex:
@@ -417,6 +442,11 @@
         finally:
             cancel_listen()

+        if response_requested:
+            return self.json(
+                {"changed_states": changed_states, "service_response": response}
+            )
+
         return self.json(changed_states)


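Not part of the diff: a sketch of the response body returned when the caller adds ?return_response to the service call URL. The entity and response values are made up.

# Illustrative sketch, not part of the diff.
changed_states = [{"entity_id": "light.kitchen", "state": "on"}]
service_response = {"brightness": 128}  # hypothetical service response payload

print({"changed_states": changed_states, "service_response": service_response})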
@@ -17,6 +17,7 @@ from .coordinator import AprilaireCoordinator

 PLATFORMS: list[Platform] = [
     Platform.CLIMATE,
+    Platform.HUMIDIFIER,
     Platform.SELECT,
     Platform.SENSOR,
 ]
@@ -62,7 +62,7 @@ class AprilaireConfigFlow(ConfigFlow, domain=DOMAIN):

         self._abort_if_unique_id_configured()

-        return self.async_create_entry(title="Aprilaire", data=user_input)
+        return self.async_create_entry(title="AprilAire", data=user_input)

         return self.async_show_form(
             step_id="user",
homeassistant/components/aprilaire/humidifier.py (new file, 194 lines)
@@ -0,0 +1,194 @@
+"""The Aprilaire humidifier component."""
+
+from __future__ import annotations
+
+from collections.abc import Awaitable, Callable
+from dataclasses import dataclass
+from typing import Any, cast
+
+from pyaprilaire.const import Attribute
+
+from homeassistant.components.humidifier import (
+    HumidifierAction,
+    HumidifierDeviceClass,
+    HumidifierEntity,
+    HumidifierEntityDescription,
+)
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.typing import StateType
+
+from .const import DOMAIN
+from .coordinator import AprilaireCoordinator
+from .entity import BaseAprilaireEntity
+
+HUMIDIFIER_ACTION_MAP: dict[StateType, HumidifierAction] = {
+    0: HumidifierAction.IDLE,
+    1: HumidifierAction.IDLE,
+    2: HumidifierAction.HUMIDIFYING,
+    3: HumidifierAction.OFF,
+}
+
+DEHUMIDIFIER_ACTION_MAP: dict[StateType, HumidifierAction] = {
+    0: HumidifierAction.IDLE,
+    1: HumidifierAction.IDLE,
+    2: HumidifierAction.DRYING,
+    3: HumidifierAction.DRYING,
+    4: HumidifierAction.OFF,
+}
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: ConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up Aprilaire humidifier devices."""
+
+    coordinator: AprilaireCoordinator = hass.data[DOMAIN][config_entry.unique_id]
+
+    assert config_entry.unique_id is not None
+
+    descriptions: list[AprilaireHumidifierDescription] = []
+
+    if coordinator.data.get(Attribute.HUMIDIFICATION_AVAILABLE) in (0, 1, 2):
+        descriptions.append(
+            AprilaireHumidifierDescription(
+                key="humidifier",
+                translation_key="humidifier",
+                device_class=HumidifierDeviceClass.HUMIDIFIER,
+                action_key=Attribute.HUMIDIFICATION_STATUS,
+                action_map=HUMIDIFIER_ACTION_MAP,
+                current_humidity_key=Attribute.INDOOR_HUMIDITY_CONTROLLING_SENSOR_VALUE,
+                target_humidity_key=Attribute.HUMIDIFICATION_SETPOINT,
+                min_humidity=10,
+                max_humidity=50,
+                default_humidity=30,
+                set_humidity_fn=coordinator.client.set_humidification_setpoint,
+            )
+        )
+
+    if coordinator.data.get(Attribute.DEHUMIDIFICATION_AVAILABLE) in (0, 1):
+        descriptions.append(
+            AprilaireHumidifierDescription(
+                key="dehumidifier",
+                translation_key="dehumidifier",
+                device_class=HumidifierDeviceClass.DEHUMIDIFIER,
+                action_key=Attribute.DEHUMIDIFICATION_STATUS,
+                action_map=DEHUMIDIFIER_ACTION_MAP,
+                current_humidity_key=Attribute.INDOOR_HUMIDITY_CONTROLLING_SENSOR_VALUE,
+                target_humidity_key=Attribute.DEHUMIDIFICATION_SETPOINT,
+                min_humidity=40,
+                max_humidity=90,
+                default_humidity=60,
+                set_humidity_fn=coordinator.client.set_dehumidification_setpoint,
+            )
+        )
+
+    async_add_entities(
+        AprilaireHumidifierEntity(coordinator, description, config_entry.unique_id)
+        for description in descriptions
+    )
+
+
+@dataclass(frozen=True, kw_only=True)
+class AprilaireHumidifierDescription(HumidifierEntityDescription):
+    """Class describing Aprilaire humidifier entities."""
+
+    action_key: str
+    action_map: dict[StateType, HumidifierAction]
+    current_humidity_key: str
+    target_humidity_key: str
+    min_humidity: int
+    max_humidity: int
+    default_humidity: int
+    set_humidity_fn: Callable[[int], Awaitable]
+
+
+class AprilaireHumidifierEntity(BaseAprilaireEntity, HumidifierEntity):
+    """Base humidity entity for Aprilaire."""
+
+    entity_description: AprilaireHumidifierDescription
+    last_target_humidity: int | None = None
+
+    def __init__(
+        self,
+        coordinator: AprilaireCoordinator,
+        description: AprilaireHumidifierDescription,
+        unique_id: str,
+    ) -> None:
+        """Initialize a select for an Aprilaire device."""
+
+        self.entity_description = description
+
+        super().__init__(coordinator, unique_id)
+
+    @property
+    def action(self) -> HumidifierAction | None:
+        """Get the current action."""
+
+        action = self.coordinator.data.get(self.entity_description.action_key)
+
+        return self.entity_description.action_map.get(action, HumidifierAction.OFF)
+
+    @property
+    def is_on(self) -> bool:
+        """Get whether the humidifier is on."""
+
+        return self.target_humidity is not None and self.target_humidity > 0
+
+    @property
+    def current_humidity(self) -> float | None:
+        """Get the current humidity."""
+
+        return cast(
+            float,
+            self.coordinator.data.get(self.entity_description.current_humidity_key),
+        )
+
+    @property
+    def target_humidity(self) -> float | None:
+        """Get the target humidity."""
+
+        target_humidity = cast(
+            float,
+            self.coordinator.data.get(self.entity_description.target_humidity_key),
+        )
+
+        if target_humidity is not None and target_humidity > 0:
+            self.last_target_humidity = int(target_humidity)
+
+        return target_humidity
+
+    @property
+    def min_humidity(self) -> float:
+        """Return the minimum humidity."""
+
+        return self.entity_description.min_humidity
+
+    @property
+    def max_humidity(self) -> float:
+        """Return the maximum humidity."""
+
+        return self.entity_description.max_humidity
+
+    async def async_set_humidity(self, humidity: int) -> None:
+        """Set the humidity."""
+
+        await self.entity_description.set_humidity_fn(humidity)
+
+    async def async_turn_on(self, **kwargs: Any) -> None:
+        """Turn the device on."""
+
+        if self.last_target_humidity is None or self.last_target_humidity == 0:
+            target_humidity = self.entity_description.default_humidity
+        else:
+            target_humidity = self.last_target_humidity
+
+        await self.entity_description.set_humidity_fn(target_humidity)
+
+    async def async_turn_off(self, **kwargs: Any) -> None:
+        """Turn the device off."""
+
+        await self.entity_description.set_humidity_fn(0)
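Not part of the diff: a minimal sketch of the turn-on behaviour in the new entity above, which restores the last non-zero setpoint and falls back to the description default otherwise.

# Illustrative sketch, not part of the diff.
default_humidity = 30
last_target_humidity = None

target = default_humidity if not last_target_humidity else last_target_humidity
print(target)  # 30

last_target_humidity = 45
target = default_humidity if not last_target_humidity else last_target_humidity
print(target)  # 45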
@@ -1,6 +1,6 @@
 {
   "domain": "aprilaire",
-  "name": "Aprilaire",
+  "name": "AprilAire",
   "codeowners": ["@chamberlain2007"],
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/aprilaire",
@@ -24,6 +24,14 @@
         "name": "Thermostat"
       }
     },
+    "humidifier": {
+      "humidifier": {
+        "name": "[%key:component::humidifier::title%]"
+      },
+      "dehumidifier": {
+        "name": "[%key:component::humidifier::entity_component::dehumidifier::name%]"
+      }
+    },
     "select": {
       "air_cleaning_event": {
         "name": "Air cleaning event",
@@ -7,12 +7,13 @@ from dataclasses import dataclass
 from APsystemsEZ1 import APsystemsEZ1M

 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import CONF_IP_ADDRESS, Platform
+from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT, Platform
 from homeassistant.core import HomeAssistant

+from .const import DEFAULT_PORT
 from .coordinator import ApSystemsDataCoordinator

-PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR]
+PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR, Platform.SWITCH]


 @dataclass
@@ -28,7 +29,11 @@ type ApSystemsConfigEntry = ConfigEntry[ApSystemsData]

 async def async_setup_entry(hass: HomeAssistant, entry: ApSystemsConfigEntry) -> bool:
     """Set up this integration using UI."""
-    api = APsystemsEZ1M(ip_address=entry.data[CONF_IP_ADDRESS], timeout=8)
+    api = APsystemsEZ1M(
+        ip_address=entry.data[CONF_IP_ADDRESS],
+        port=entry.data.get(CONF_PORT, DEFAULT_PORT),
+        timeout=8,
+    )
     coordinator = ApSystemsDataCoordinator(hass, api)
     await coordinator.async_config_entry_first_refresh()
     assert entry.unique_id
@@ -7,14 +7,16 @@ from APsystemsEZ1 import APsystemsEZ1M
 import voluptuous as vol

 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
-from homeassistant.const import CONF_IP_ADDRESS
+from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
+import homeassistant.helpers.config_validation as cv

-from .const import DOMAIN
+from .const import DEFAULT_PORT, DOMAIN

 DATA_SCHEMA = vol.Schema(
     {
-        vol.Required(CONF_IP_ADDRESS): str,
+        vol.Required(CONF_IP_ADDRESS): cv.string,
+        vol.Optional(CONF_PORT): cv.port,
     }
 )

@@ -32,7 +34,11 @@ class APsystemsLocalAPIFlow(ConfigFlow, domain=DOMAIN):

         if user_input is not None:
             session = async_get_clientsession(self.hass, False)
-            api = APsystemsEZ1M(user_input[CONF_IP_ADDRESS], session=session)
+            api = APsystemsEZ1M(
+                ip_address=user_input[CONF_IP_ADDRESS],
+                port=user_input.get(CONF_PORT, DEFAULT_PORT),
+                session=session,
+            )
             try:
                 device_info = await api.get_device_info()
             except (TimeoutError, ClientConnectionError):
@@ -4,3 +4,4 @@ from logging import Logger, getLogger

 LOGGER: Logger = getLogger(__package__)
 DOMAIN = "apsystems"
+DEFAULT_PORT = 8050
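Not part of the diff: config entries created before the port option exists have no CONF_PORT in their data, so the .get() fallback above keeps them working. A tiny sketch with hypothetical entry data:

# Illustrative sketch, not part of the diff.
CONF_PORT = "port"
DEFAULT_PORT = 8050

old_entry_data = {"ip_address": "192.0.2.10"}                   # hypothetical
new_entry_data = {"ip_address": "192.0.2.10", CONF_PORT: 8051}  # hypothetical

print(old_entry_data.get(CONF_PORT, DEFAULT_PORT))  # 8050
print(new_entry_data.get(CONF_PORT, DEFAULT_PORT))  # 8051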
@@ -3,7 +3,11 @@
     "step": {
       "user": {
         "data": {
-          "ip_address": "[%key:common::config_flow::data::ip%]"
+          "ip_address": "[%key:common::config_flow::data::ip%]",
+          "port": "[%key:common::config_flow::data::port%]"
+        },
+        "data_description": {
+          "port": "The integration will default to 8050, if not set, which should be suitable for most installs"
         }
       }
     },
@@ -16,18 +20,43 @@
     },
     "entity": {
       "sensor": {
-        "total_power": { "name": "Total power" },
-        "total_power_p1": { "name": "Power of P1" },
-        "total_power_p2": { "name": "Power of P2" },
-        "lifetime_production": { "name": "Total lifetime production" },
-        "lifetime_production_p1": { "name": "Lifetime production of P1" },
-        "lifetime_production_p2": { "name": "Lifetime production of P2" },
-        "today_production": { "name": "Production of today" },
-        "today_production_p1": { "name": "Production of today from P1" },
-        "today_production_p2": { "name": "Production of today from P2" }
+        "total_power": {
+          "name": "Total power"
+        },
+        "total_power_p1": {
+          "name": "Power of P1"
+        },
+        "total_power_p2": {
+          "name": "Power of P2"
+        },
+        "lifetime_production": {
+          "name": "Total lifetime production"
+        },
+        "lifetime_production_p1": {
+          "name": "Lifetime production of P1"
+        },
+        "lifetime_production_p2": {
+          "name": "Lifetime production of P2"
+        },
+        "today_production": {
+          "name": "Production of today"
+        },
+        "today_production_p1": {
+          "name": "Production of today from P1"
+        },
+        "today_production_p2": {
+          "name": "Production of today from P2"
+        }
       },
       "number": {
-        "max_output": { "name": "Max output" }
+        "max_output": {
+          "name": "Max output"
+        }
+      },
+      "switch": {
+        "inverter_status": {
+          "name": "Inverter status"
+        }
       }
     }
   }
homeassistant/components/apsystems/switch.py (new file, 56 lines)
@@ -0,0 +1,56 @@
+"""The power switch which can be toggled via the APsystems local API integration."""
+
+from __future__ import annotations
+
+from typing import Any
+
+from aiohttp.client_exceptions import ClientConnectionError
+from APsystemsEZ1 import Status
+
+from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from . import ApSystemsConfigEntry, ApSystemsData
+from .entity import ApSystemsEntity
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: ApSystemsConfigEntry,
+    add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up the switch platform."""
+
+    add_entities([ApSystemsInverterSwitch(config_entry.runtime_data)], True)
+
+
+class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity):
+    """The switch class for APSystems switches."""
+
+    _attr_device_class = SwitchDeviceClass.SWITCH
+    _attr_translation_key = "inverter_status"
+
+    def __init__(self, data: ApSystemsData) -> None:
+        """Initialize the switch."""
+        super().__init__(data)
+        self._api = data.coordinator.api
+        self._attr_unique_id = f"{data.device_id}_inverter_status"
+
+    async def async_update(self) -> None:
+        """Update switch status and availability."""
+        try:
+            status = await self._api.get_device_power_status()
+        except (TimeoutError, ClientConnectionError):
+            self._attr_available = False
+        else:
+            self._attr_available = True
+            self._attr_is_on = status == Status.normal
+
+    async def async_turn_on(self, **kwargs: Any) -> None:
+        """Turn the switch on."""
+        await self._api.set_device_power_status(0)
+
+    async def async_turn_off(self, **kwargs: Any) -> None:
+        """Turn the switch off."""
+        await self._api.set_device_power_status(1)
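Not part of the diff: a small sketch of the power-status mapping used by the new switch above, where the local API encodes "power on" as 0 and "power off" as 1.

# Illustrative sketch, not part of the diff.
POWER_ON, POWER_OFF = 0, 1

def desired_status(turn_on):
    """Return the status code to send for a turn_on/turn_off request."""
    return POWER_ON if turn_on else POWER_OFF

print(desired_status(True), desired_status(False))  # 0 1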
@@ -8,5 +8,5 @@
   "integration_type": "device",
   "iot_class": "cloud_polling",
   "loggers": ["aioaquacell"],
-  "requirements": ["aioaquacell==0.1.8"]
+  "requirements": ["aioaquacell==0.2.0"]
 }
@@ -13,17 +13,17 @@ from homeassistant.const import CONF_HOST, CONF_PORT, Platform
 from homeassistant.core import HomeAssistant
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.dispatcher import async_dispatcher_send
-from homeassistant.helpers.typing import ConfigType

 from .const import (
     DEFAULT_SCAN_INTERVAL,
     DOMAIN,
-    DOMAIN_DATA_ENTRIES,
     SIGNAL_CLIENT_DATA,
     SIGNAL_CLIENT_STARTED,
     SIGNAL_CLIENT_STOPPED,
 )

+type ArcamFmjConfigEntry = ConfigEntry[Client]
+
 _LOGGER = logging.getLogger(__name__)

 CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
@@ -31,34 +31,21 @@ CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
 PLATFORMS = [Platform.MEDIA_PLAYER]


-async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
-    """Set up the component."""
-    hass.data[DOMAIN_DATA_ENTRIES] = {}
-    return True
-
-
-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: ArcamFmjConfigEntry) -> bool:
     """Set up config entry."""
-    entries = hass.data[DOMAIN_DATA_ENTRIES]
-
-    client = Client(entry.data[CONF_HOST], entry.data[CONF_PORT])
-    entries[entry.entry_id] = client
+    entry.runtime_data = Client(entry.data[CONF_HOST], entry.data[CONF_PORT])

     entry.async_create_background_task(
-        hass, _run_client(hass, client, DEFAULT_SCAN_INTERVAL), "arcam_fmj"
+        hass, _run_client(hass, entry.runtime_data, DEFAULT_SCAN_INTERVAL), "arcam_fmj"
     )

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

     return True


 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Cleanup before removing config entry."""
-    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-    hass.data[DOMAIN_DATA_ENTRIES].pop(entry.entry_id)
-
-    return unload_ok
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


 async def _run_client(hass: HomeAssistant, client: Client, interval: float) -> None:
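Not part of the diff: entry.runtime_data replaces the old hass.data[DOMAIN_DATA_ENTRIES] mapping, so platforms read the client straight from the typed config entry. A minimal sketch with stand-in classes (both classes below are hypothetical, not the real Client or ConfigEntry):

# Illustrative sketch, not part of the diff.
from dataclasses import dataclass

@dataclass
class FakeClient:  # stand-in for arcam.fmj.client.Client
    host: str
    port: int

class FakeEntry:  # stand-in for a typed ConfigEntry
    runtime_data = None

entry = FakeEntry()
entry.runtime_data = FakeClient("192.0.2.20", 50000)
print(entry.runtime_data.host)  # 192.0.2.20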
@@ -10,18 +10,11 @@ from arcam.fmj.utils import get_uniqueid_from_host, get_uniqueid_from_udn
 import voluptuous as vol

 from homeassistant.components import ssdp
-from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
+from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
 from homeassistant.const import CONF_HOST, CONF_PORT
-from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

-from .const import DEFAULT_NAME, DEFAULT_PORT, DOMAIN, DOMAIN_DATA_ENTRIES
+from .const import DEFAULT_NAME, DEFAULT_PORT, DOMAIN


-def get_entry_client(hass: HomeAssistant, entry: ConfigEntry) -> Client:
-    """Retrieve client associated with a config entry."""
-    client: Client = hass.data[DOMAIN_DATA_ENTRIES][entry.entry_id]
-    return client
-
-
 class ArcamFmjFlowHandler(ConfigFlow, domain=DOMAIN):
@@ -11,5 +11,3 @@ EVENT_TURN_ON = "arcam_fmj.turn_on"
 DEFAULT_PORT = 50000
 DEFAULT_NAME = "Arcam FMJ"
 DEFAULT_SCAN_INTERVAL = 5
-
-DOMAIN_DATA_ENTRIES = f"{DOMAIN}.entries"
@@ -19,7 +19,6 @@ from homeassistant.components.media_player import (
     MediaType,
 )
 from homeassistant.components.media_player.errors import BrowseError
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import ATTR_ENTITY_ID
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
@@ -27,7 +26,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

-from .config_flow import get_entry_client
+from . import ArcamFmjConfigEntry
 from .const import (
     DOMAIN,
     EVENT_TURN_ON,
@@ -41,12 +40,12 @@ _LOGGER = logging.getLogger(__name__)

 async def async_setup_entry(
     hass: HomeAssistant,
-    config_entry: ConfigEntry,
+    config_entry: ArcamFmjConfigEntry,
     async_add_entities: AddEntitiesCallback,
 ) -> None:
     """Set up the configuration entry."""

-    client = get_entry_client(hass, config_entry)
+    client = config_entry.runtime_data

     async_add_entities(
         [
@@ -19,6 +19,9 @@ class AsekoEntity(CoordinatorEntity[AsekoDataUpdateCoordinator]):
         super().__init__(coordinator)
         self._unit = unit

+        if self._unit.type == "Remote":
+            self._device_model = "ASIN Pool"
+        else:
             self._device_model = f"ASIN AQUA {self._unit.type}"
         self._device_name = self._unit.name if self._unit.name else self._device_model

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aseko_pool_live",
   "iot_class": "cloud_polling",
   "loggers": ["aioaseko"],
-  "requirements": ["aioaseko==0.1.1"]
+  "requirements": ["aioaseko==0.2.0"]
 }
@@ -16,6 +16,10 @@ from .const import (
     DATA_LAST_WAKE_UP,
     DOMAIN,
     EVENT_RECORDING,
+    SAMPLE_CHANNELS,
+    SAMPLE_RATE,
+    SAMPLE_WIDTH,
+    SAMPLES_PER_CHUNK,
 )
 from .error import PipelineNotFound
 from .pipeline import (
@@ -53,6 +57,10 @@ __all__ = (
     "PipelineNotFound",
     "WakeWordSettings",
     "EVENT_RECORDING",
+    "SAMPLES_PER_CHUNK",
+    "SAMPLE_RATE",
+    "SAMPLE_WIDTH",
+    "SAMPLE_CHANNELS",
 )

 CONFIG_SCHEMA = vol.Schema(
homeassistant/components/assist_pipeline/audio_enhancer.py (new file, 72 lines)
@@ -0,0 +1,72 @@
+"""Audio enhancement for Assist."""
+
+from abc import ABC, abstractmethod
+from dataclasses import dataclass
+import logging
+
+from pymicro_vad import MicroVad
+
+from .const import BYTES_PER_CHUNK
+
+_LOGGER = logging.getLogger(__name__)
+
+
+@dataclass(frozen=True, slots=True)
+class EnhancedAudioChunk:
+    """Enhanced audio chunk and metadata."""
+
+    audio: bytes
+    """Raw PCM audio @ 16Khz with 16-bit mono samples"""
+
+    timestamp_ms: int
+    """Timestamp relative to start of audio stream (milliseconds)"""
+
+    is_speech: bool | None
+    """True if audio chunk likely contains speech, False if not, None if unknown"""
+
+
+class AudioEnhancer(ABC):
+    """Base class for audio enhancement."""
+
+    def __init__(
+        self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool
+    ) -> None:
+        """Initialize audio enhancer."""
+        self.auto_gain = auto_gain
+        self.noise_suppression = noise_suppression
+        self.is_vad_enabled = is_vad_enabled
+
+    @abstractmethod
+    def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk:
+        """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples."""
+
+
+class MicroVadEnhancer(AudioEnhancer):
+    """Audio enhancer that just runs microVAD."""
+
+    def __init__(
+        self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool
+    ) -> None:
+        """Initialize audio enhancer."""
+        super().__init__(auto_gain, noise_suppression, is_vad_enabled)
+
+        self.vad: MicroVad | None = None
+        self.threshold = 0.5
+
+        if self.is_vad_enabled:
+            self.vad = MicroVad()
+            _LOGGER.debug("Initialized microVAD with threshold=%s", self.threshold)
+
+    def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk:
+        """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples."""
+        is_speech: bool | None = None
+
+        if self.vad is not None:
+            # Run VAD
+            assert len(audio) == BYTES_PER_CHUNK
+            speech_prob = self.vad.Process10ms(audio)
+            is_speech = speech_prob > self.threshold
+
+        return EnhancedAudioChunk(
+            audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech
+        )
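Not part of the diff: feeding one 10 ms chunk of silence through MicroVadEnhancer would require the pymicro-vad package, so the call itself is only sketched in comments here; the expected speech result for silence is an assumption.

# Illustrative sketch, not part of the diff.
silence = bytes(320)  # BYTES_PER_CHUNK worth of 16-bit mono zeros
# enhancer = MicroVadEnhancer(auto_gain=0, noise_suppression=0, is_vad_enabled=True)
# chunk = enhancer.enhance_chunk(silence, timestamp_ms=0)
# chunk.is_speech is expected to be False for pure silence.
print(len(silence))  # 320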
@@ -15,3 +15,10 @@ DATA_LAST_WAKE_UP = f"{DOMAIN}.last_wake_up"
 WAKE_WORD_COOLDOWN = 2  # seconds

 EVENT_RECORDING = f"{DOMAIN}_recording"
+
+SAMPLE_RATE = 16000  # hertz
+SAMPLE_WIDTH = 2  # bytes
+SAMPLE_CHANNELS = 1  # mono
+MS_PER_CHUNK = 10
+SAMPLES_PER_CHUNK = SAMPLE_RATE // (1000 // MS_PER_CHUNK)  # 10 ms @ 16Khz
+BYTES_PER_CHUNK = SAMPLES_PER_CHUNK * SAMPLE_WIDTH * SAMPLE_CHANNELS  # 16-bit
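Not part of the diff: the constants above work out to 160 samples and 320 bytes per 10 ms of 16 kHz, 16-bit, mono audio, which is the chunk size the VAD expects.

# Illustrative sketch, not part of the diff.
SAMPLE_RATE = 16000
SAMPLE_WIDTH = 2
SAMPLE_CHANNELS = 1
MS_PER_CHUNK = 10

SAMPLES_PER_CHUNK = SAMPLE_RATE // (1000 // MS_PER_CHUNK)
BYTES_PER_CHUNK = SAMPLES_PER_CHUNK * SAMPLE_WIDTH * SAMPLE_CHANNELS
print(SAMPLES_PER_CHUNK, BYTES_PER_CHUNK)  # 160 320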
@@ -4,7 +4,8 @@
   "codeowners": ["@balloob", "@synesthesiam"],
   "dependencies": ["conversation", "stt", "tts", "wake_word"],
   "documentation": "https://www.home-assistant.io/integrations/assist_pipeline",
+  "integration_type": "system",
   "iot_class": "local_push",
   "quality_scale": "internal",
-  "requirements": ["webrtc-noise-gain==1.2.3"]
+  "requirements": ["pymicro-vad==1.0.1"]
 }
homeassistant/components/assist_pipeline/pipeline.py
@ -5,7 +5,7 @@ from __future__ import annotations
 import array
 import asyncio
 from collections import defaultdict, deque
-from collections.abc import AsyncIterable, Callable, Iterable
+from collections.abc import AsyncGenerator, AsyncIterable, Callable
 from dataclasses import asdict, dataclass, field
 from enum import StrEnum
 import logging
@ -13,15 +13,11 @@ from pathlib import Path
 from queue import Empty, Queue
 from threading import Thread
 import time
-from typing import TYPE_CHECKING, Any, Final, Literal, cast
+from typing import Any, Literal, cast
 import wave

-from typing_extensions import AsyncGenerator
 import voluptuous as vol

-if TYPE_CHECKING:
-    from webrtc_noise_gain import AudioProcessor
-
 from homeassistant.components import (
     conversation,
     media_source,
@ -53,12 +49,19 @@ from homeassistant.util import (
 )
 from homeassistant.util.limited_size_dict import LimitedSizeDict

+from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadEnhancer
 from .const import (
+    BYTES_PER_CHUNK,
     CONF_DEBUG_RECORDING_DIR,
     DATA_CONFIG,
     DATA_LAST_WAKE_UP,
     DATA_MIGRATIONS,
     DOMAIN,
+    MS_PER_CHUNK,
+    SAMPLE_CHANNELS,
+    SAMPLE_RATE,
+    SAMPLE_WIDTH,
+    SAMPLES_PER_CHUNK,
     WAKE_WORD_COOLDOWN,
 )
 from .error import (
@ -112,14 +115,14 @@ STORED_PIPELINE_RUNS = 10

 SAVE_DELAY = 10

-AUDIO_PROCESSOR_SAMPLES: Final = 160  # 10 ms @ 16 Khz
-AUDIO_PROCESSOR_BYTES: Final = AUDIO_PROCESSOR_SAMPLES * 2  # 16-bit samples

-async def _async_resolve_default_pipeline_settings(
+@callback
+def _async_resolve_default_pipeline_settings(
     hass: HomeAssistant,
-    stt_engine_id: str | None,
-    tts_engine_id: str | None,
+    *,
+    conversation_engine_id: str | None = None,
+    stt_engine_id: str | None = None,
+    tts_engine_id: str | None = None,
     pipeline_name: str,
 ) -> dict[str, str | None]:
     """Resolve settings for a default pipeline.
@ -137,12 +140,13 @@ async def _async_resolve_default_pipeline_settings(
     wake_word_entity = None
     wake_word_id = None

+    if conversation_engine_id is None:
+        conversation_engine_id = conversation.HOME_ASSISTANT_AGENT
+
     # Find a matching language supported by the Home Assistant conversation agent
     conversation_languages = language_util.matches(
         hass.config.language,
-        await conversation.async_get_conversation_languages(
-            hass, conversation.HOME_ASSISTANT_AGENT
-        ),
+        conversation.async_get_conversation_languages(hass, conversation_engine_id),
         country=hass.config.country,
     )
     if conversation_languages:
@ -201,7 +205,7 @@ async def _async_resolve_default_pipeline_settings(
         tts_engine_id = None

     return {
-        "conversation_engine": conversation.HOME_ASSISTANT_AGENT,
+        "conversation_engine": conversation_engine_id,
         "conversation_language": conversation_language,
         "language": hass.config.language,
         "name": pipeline_name,
@ -223,8 +227,8 @@ async def _async_create_default_pipeline(
     The default pipeline will use the homeassistant conversation agent and the
     default stt / tts engines.
     """
-    pipeline_settings = await _async_resolve_default_pipeline_settings(
-        hass, stt_engine_id=None, tts_engine_id=None, pipeline_name="Home Assistant"
+    pipeline_settings = _async_resolve_default_pipeline_settings(
+        hass, pipeline_name="Home Assistant"
     )
     return await pipeline_store.async_create_item(pipeline_settings)

@ -242,8 +246,11 @@ async def async_create_default_pipeline(
     """
     pipeline_data: PipelineData = hass.data[DOMAIN]
     pipeline_store = pipeline_data.pipeline_store
-    pipeline_settings = await _async_resolve_default_pipeline_settings(
-        hass, stt_engine_id, tts_engine_id, pipeline_name=pipeline_name
+    pipeline_settings = _async_resolve_default_pipeline_settings(
+        hass,
+        stt_engine_id=stt_engine_id,
+        tts_engine_id=tts_engine_id,
+        pipeline_name=pipeline_name,
     )
     if (
         pipeline_settings["stt_engine"] != stt_engine_id
@ -253,6 +260,22 @@ async def async_create_default_pipeline(
     return await pipeline_store.async_create_item(pipeline_settings)


+@callback
+def _async_get_pipeline_from_conversation_entity(
+    hass: HomeAssistant, entity_id: str
+) -> Pipeline:
+    """Get a pipeline by conversation entity ID."""
+    entity = hass.states.get(entity_id)
+    settings = _async_resolve_default_pipeline_settings(
+        hass,
+        pipeline_name=entity.name if entity else entity_id,
+        conversation_engine_id=entity_id,
+    )
+    settings["id"] = entity_id
+
+    return Pipeline.from_json(settings)
+
+
 @callback
 def async_get_pipeline(hass: HomeAssistant, pipeline_id: str | None = None) -> Pipeline:
     """Get a pipeline by id or the preferred pipeline."""
@ -262,6 +285,9 @@ def async_get_pipeline(hass: HomeAssistant, pipeline_id: str | None = None) -> P
         # A pipeline was not specified, use the preferred one
         pipeline_id = pipeline_data.pipeline_store.async_get_preferred_item()

+    if pipeline_id.startswith("conversation."):
+        return _async_get_pipeline_from_conversation_entity(hass, pipeline_id)
+
     pipeline = pipeline_data.pipeline_store.data.get(pipeline_id)

     # If invalid pipeline ID was specified
@ -274,11 +300,11 @@ def async_get_pipeline(hass: HomeAssistant, pipeline_id: str | None = None) -> P


 @callback
-def async_get_pipelines(hass: HomeAssistant) -> Iterable[Pipeline]:
+def async_get_pipelines(hass: HomeAssistant) -> list[Pipeline]:
     """Get all pipelines."""
     pipeline_data: PipelineData = hass.data[DOMAIN]

-    return pipeline_data.pipeline_store.data.values()
+    return list(pipeline_data.pipeline_store.data.values())


 async def async_update_pipeline(
@ -304,6 +330,9 @@ async def async_update_pipeline(
     updates.pop("id")
     # Refactor this once we bump to Python 3.12
     # and have https://peps.python.org/pep-0692/
+    updates.update(
+        {
+            key: val
             for key, val in (
                 ("conversation_engine", conversation_engine),
                 ("conversation_language", conversation_language),
@ -316,9 +345,10 @@ async def async_update_pipeline(
                 ("tts_voice", tts_voice),
                 ("wake_word_entity", wake_word_entity),
                 ("wake_word_id", wake_word_id),
-    ):
-        if val is not UNDEFINED:
-            updates[key] = val
+            )
+            if val is not UNDEFINED
+        }
+    )

     await pipeline_data.pipeline_store.async_update_item(pipeline.id, updates)

@ -474,8 +504,8 @@ class AudioSettings:
     is_vad_enabled: bool = True
     """True if VAD is used to determine the end of the voice command."""

-    is_chunking_enabled: bool = True
-    """True if audio is automatically split into 10 ms chunks (required for VAD, etc.)"""
+    silence_seconds: float = 0.5
+    """Seconds of silence after voice command has ended."""

     def __post_init__(self) -> None:
         """Verify settings post-initialization."""
@ -485,9 +515,6 @@ class AudioSettings:
         if (self.auto_gain_dbfs < 0) or (self.auto_gain_dbfs > 31):
             raise ValueError("auto_gain_dbfs must be in [0, 31]")

-        if self.needs_processor and (not self.is_chunking_enabled):
-            raise ValueError("Chunking must be enabled for audio processing")
-
     @property
     def needs_processor(self) -> bool:
         """True if an audio processor is needed."""
@ -498,20 +525,6 @@ class AudioSettings:
         )


-@dataclass(frozen=True, slots=True)
-class ProcessedAudioChunk:
-    """Processed audio chunk and metadata."""
-
-    audio: bytes
-    """Raw PCM audio @ 16Khz with 16-bit mono samples"""
-
-    timestamp_ms: int
-    """Timestamp relative to start of audio stream (milliseconds)"""
-
-    is_speech: bool | None
-    """True if audio chunk likely contains speech, False if not, None if unknown"""
-
-
 @dataclass
 class PipelineRun:
     """Running context for a pipeline."""
@ -544,10 +557,12 @@ class PipelineRun:
     debug_recording_queue: Queue[str | bytes | None] | None = None
     """Queue to communicate with debug recording thread"""

-    audio_processor: AudioProcessor | None = None
+    audio_enhancer: AudioEnhancer | None = None
     """VAD/noise suppression/auto gain"""

-    audio_processor_buffer: AudioBuffer = field(init=False, repr=False)
+    audio_chunking_buffer: AudioBuffer = field(
+        default_factory=lambda: AudioBuffer(BYTES_PER_CHUNK)
+    )
     """Buffer used when splitting audio into chunks for audio processing"""

     _device_id: str | None = None
@ -572,17 +587,12 @@ class PipelineRun:
         pipeline_data.pipeline_runs.add_run(self)

         # Initialize with audio settings
-        self.audio_processor_buffer = AudioBuffer(AUDIO_PROCESSOR_BYTES)
-        if self.audio_settings.needs_processor:
-            # Delay import of webrtc so HA start up is not crashing
-            # on older architectures (armhf).
-            #
-            # pylint: disable=import-outside-toplevel
-            from webrtc_noise_gain import AudioProcessor
-
-            self.audio_processor = AudioProcessor(
+        if self.audio_settings.needs_processor and (self.audio_enhancer is None):
+            # Default audio enhancer
+            self.audio_enhancer = MicroVadEnhancer(
                 self.audio_settings.auto_gain_dbfs,
                 self.audio_settings.noise_suppression_level,
+                self.audio_settings.is_vad_enabled,
             )

     def __eq__(self, other: object) -> bool:
@ -659,8 +669,8 @@ class PipelineRun:

     async def wake_word_detection(
         self,
-        stream: AsyncIterable[ProcessedAudioChunk],
-        audio_chunks_for_stt: list[ProcessedAudioChunk],
+        stream: AsyncIterable[EnhancedAudioChunk],
+        audio_chunks_for_stt: list[EnhancedAudioChunk],
     ) -> wake_word.DetectionResult | None:
         """Run wake-word-detection portion of pipeline. Returns detection result."""
         metadata_dict = asdict(
@ -703,10 +713,11 @@ class PipelineRun:
         # Audio chunk buffer. This audio will be forwarded to speech-to-text
         # after wake-word-detection.
         num_audio_chunks_to_buffer = int(
-            (wake_word_settings.audio_seconds_to_buffer * 16000)
-            / AUDIO_PROCESSOR_SAMPLES
+            (wake_word_settings.audio_seconds_to_buffer * SAMPLE_RATE)
+            / SAMPLES_PER_CHUNK
         )
-        stt_audio_buffer: deque[ProcessedAudioChunk] | None = None
+        stt_audio_buffer: deque[EnhancedAudioChunk] | None = None
         if num_audio_chunks_to_buffer > 0:
             stt_audio_buffer = deque(maxlen=num_audio_chunks_to_buffer)

@ -768,7 +779,7 @@ class PipelineRun:
             # speech-to-text so the user does not have to pause before
             # speaking the voice command.
             audio_chunks_for_stt.extend(
-                ProcessedAudioChunk(
+                EnhancedAudioChunk(
                     audio=chunk_ts[0], timestamp_ms=chunk_ts[1], is_speech=False
                 )
                 for chunk_ts in result.queued_audio
@ -790,18 +801,17 @@ class PipelineRun:

     async def _wake_word_audio_stream(
         self,
-        audio_stream: AsyncIterable[ProcessedAudioChunk],
-        stt_audio_buffer: deque[ProcessedAudioChunk] | None,
+        audio_stream: AsyncIterable[EnhancedAudioChunk],
+        stt_audio_buffer: deque[EnhancedAudioChunk] | None,
         wake_word_vad: VoiceActivityTimeout | None,
-        sample_rate: int = 16000,
-        sample_width: int = 2,
+        sample_rate: int = SAMPLE_RATE,
+        sample_width: int = SAMPLE_WIDTH,
     ) -> AsyncIterable[tuple[bytes, int]]:
         """Yield audio chunks with timestamps (milliseconds since start of stream).

         Adds audio to a ring buffer that will be forwarded to speech-to-text after
         detection. Times out if VAD detects enough silence.
         """
-        chunk_seconds = AUDIO_PROCESSOR_SAMPLES / sample_rate
         async for chunk in audio_stream:
             if self.abort_wake_word_detection:
                 raise WakeWordDetectionAborted
@ -816,6 +826,7 @@ class PipelineRun:
                 stt_audio_buffer.append(chunk)

             if wake_word_vad is not None:
+                chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate
                 if not wake_word_vad.process(chunk_seconds, chunk.is_speech):
                     raise WakeWordTimeoutError(
                         code="wake-word-timeout", message="Wake word was not detected"
@ -852,9 +863,18 @@ class PipelineRun:
     async def speech_to_text(
         self,
         metadata: stt.SpeechMetadata,
-        stream: AsyncIterable[ProcessedAudioChunk],
+        stream: AsyncIterable[EnhancedAudioChunk],
     ) -> str:
         """Run speech-to-text portion of pipeline. Returns the spoken text."""
+        # Create a background task to prepare the conversation agent
+        if self.end_stage >= PipelineStage.INTENT:
+            self.hass.async_create_background_task(
+                conversation.async_prepare_agent(
+                    self.hass, self.intent_agent, self.language
+                ),
+                f"prepare conversation agent {self.intent_agent}",
+            )
+
         if isinstance(self.stt_provider, stt.Provider):
             engine = self.stt_provider.name
         else:
@ -878,7 +898,9 @@ class PipelineRun:
         # Transcribe audio stream
         stt_vad: VoiceCommandSegmenter | None = None
         if self.audio_settings.is_vad_enabled:
-            stt_vad = VoiceCommandSegmenter()
+            stt_vad = VoiceCommandSegmenter(
+                silence_seconds=self.audio_settings.silence_seconds
+            )

         result = await self.stt_provider.async_process_audio_stream(
             metadata,
@ -919,18 +941,18 @@ class PipelineRun:

     async def _speech_to_text_stream(
         self,
-        audio_stream: AsyncIterable[ProcessedAudioChunk],
+        audio_stream: AsyncIterable[EnhancedAudioChunk],
         stt_vad: VoiceCommandSegmenter | None,
-        sample_rate: int = 16000,
-        sample_width: int = 2,
+        sample_rate: int = SAMPLE_RATE,
+        sample_width: int = SAMPLE_WIDTH,
     ) -> AsyncGenerator[bytes]:
         """Yield audio chunks until VAD detects silence or speech-to-text completes."""
-        chunk_seconds = AUDIO_PROCESSOR_SAMPLES / sample_rate
         sent_vad_start = False
         async for chunk in audio_stream:
             self._capture_chunk(chunk.audio)

             if stt_vad is not None:
+                chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate
                 if not stt_vad.process(chunk_seconds, chunk.is_speech):
                     # Silence detected at the end of voice command
                     self.process_event(
@ -957,8 +979,6 @@ class PipelineRun:
         """Prepare recognizing an intent."""
         agent_info = conversation.async_get_agent_info(
             self.hass,
-            # If no conversation engine is set, use the Home Assistant agent
-            # (the conversation integration default is currently the last one set)
             self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT,
         )

@ -1036,8 +1056,8 @@ class PipelineRun:
             tts_options[tts.ATTR_PREFERRED_FORMAT] = self.tts_audio_output
             if self.tts_audio_output == "wav":
                 # 16 Khz, 16-bit mono
-                tts_options[tts.ATTR_PREFERRED_SAMPLE_RATE] = 16000
-                tts_options[tts.ATTR_PREFERRED_SAMPLE_CHANNELS] = 1
+                tts_options[tts.ATTR_PREFERRED_SAMPLE_RATE] = SAMPLE_RATE
+                tts_options[tts.ATTR_PREFERRED_SAMPLE_CHANNELS] = SAMPLE_CHANNELS

         try:
             options_supported = await tts.async_support_options(
@ -1182,53 +1202,31 @@ class PipelineRun:
         self.debug_recording_thread = None

     async def process_volume_only(
-        self,
-        audio_stream: AsyncIterable[bytes],
-        sample_rate: int = 16000,
-        sample_width: int = 2,
-    ) -> AsyncGenerator[ProcessedAudioChunk]:
+        self, audio_stream: AsyncIterable[bytes]
+    ) -> AsyncGenerator[EnhancedAudioChunk]:
         """Apply volume transformation only (no VAD/audio enhancements) with optional chunking."""
-        ms_per_sample = sample_rate // 1000
-        ms_per_chunk = (AUDIO_PROCESSOR_SAMPLES // sample_width) // ms_per_sample
         timestamp_ms = 0

         async for chunk in audio_stream:
             if self.audio_settings.volume_multiplier != 1.0:
                 chunk = _multiply_volume(chunk, self.audio_settings.volume_multiplier)

-            if self.audio_settings.is_chunking_enabled:
-                # 10 ms chunking
-                for chunk_10ms in chunk_samples(
-                    chunk, AUDIO_PROCESSOR_BYTES, self.audio_processor_buffer
-                ):
-                    yield ProcessedAudioChunk(
-                        audio=chunk_10ms,
-                        timestamp_ms=timestamp_ms,
-                        is_speech=None,  # no VAD
-                    )
-                    timestamp_ms += ms_per_chunk
-            else:
-                # No chunking
-                yield ProcessedAudioChunk(
-                    audio=chunk,
-                    timestamp_ms=timestamp_ms,
-                    is_speech=None,  # no VAD
-                )
-                timestamp_ms += (len(chunk) // sample_width) // ms_per_sample
+            for sub_chunk in chunk_samples(
+                chunk, BYTES_PER_CHUNK, self.audio_chunking_buffer
+            ):
+                yield EnhancedAudioChunk(
+                    audio=sub_chunk,
+                    timestamp_ms=timestamp_ms,
+                    is_speech=None,  # no VAD
+                )
+                timestamp_ms += MS_PER_CHUNK

     async def process_enhance_audio(
-        self,
-        audio_stream: AsyncIterable[bytes],
-        sample_rate: int = 16000,
-        sample_width: int = 2,
-    ) -> AsyncGenerator[ProcessedAudioChunk]:
-        """Split audio into 10 ms chunks and apply VAD/noise suppression/auto gain/volume transformation."""
-        assert self.audio_processor is not None
-
-        ms_per_sample = sample_rate // 1000
-        ms_per_chunk = (AUDIO_PROCESSOR_SAMPLES // sample_width) // ms_per_sample
+        self, audio_stream: AsyncIterable[bytes]
+    ) -> AsyncGenerator[EnhancedAudioChunk]:
+        """Split audio into chunks and apply VAD/noise suppression/auto gain/volume transformation."""
+        assert self.audio_enhancer is not None
         timestamp_ms = 0

         async for dirty_samples in audio_stream:
             if self.audio_settings.volume_multiplier != 1.0:
                 # Static gain
@ -1236,18 +1234,12 @@ class PipelineRun:
                     dirty_samples, self.audio_settings.volume_multiplier
                 )

-            # Split into 10ms chunks for audio enhancements/VAD
-            for dirty_10ms_chunk in chunk_samples(
-                dirty_samples, AUDIO_PROCESSOR_BYTES, self.audio_processor_buffer
+            # Split into chunks for audio enhancements/VAD
+            for dirty_chunk in chunk_samples(
+                dirty_samples, BYTES_PER_CHUNK, self.audio_chunking_buffer
             ):
-                ap_result = self.audio_processor.Process10ms(dirty_10ms_chunk)
-                yield ProcessedAudioChunk(
-                    audio=ap_result.audio,
-                    timestamp_ms=timestamp_ms,
-                    is_speech=ap_result.is_speech,
-                )
-                timestamp_ms += ms_per_chunk
+                yield self.audio_enhancer.enhance_chunk(dirty_chunk, timestamp_ms)
+                timestamp_ms += MS_PER_CHUNK


 def _multiply_volume(chunk: bytes, volume_multiplier: float) -> bytes:
@ -1287,9 +1279,9 @@ def _pipeline_debug_recording_thread_proc(

                 wav_path = run_recording_dir / f"{message}.wav"
                 wav_writer = wave.open(str(wav_path), "wb")
-                wav_writer.setframerate(16000)
-                wav_writer.setsampwidth(2)
-                wav_writer.setnchannels(1)
+                wav_writer.setframerate(SAMPLE_RATE)
+                wav_writer.setsampwidth(SAMPLE_WIDTH)
+                wav_writer.setnchannels(SAMPLE_CHANNELS)
             elif isinstance(message, bytes):
                 # Chunk of 16-bit mono audio at 16Khz
                 if wav_writer is not None:
@ -1332,8 +1324,8 @@ class PipelineInput:
         """Run pipeline."""
         self.run.start(device_id=self.device_id)
         current_stage: PipelineStage | None = self.run.start_stage
-        stt_audio_buffer: list[ProcessedAudioChunk] = []
-        stt_processed_stream: AsyncIterable[ProcessedAudioChunk] | None = None
+        stt_audio_buffer: list[EnhancedAudioChunk] = []
+        stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None = None

         if self.stt_stream is not None:
             if self.run.audio_settings.needs_processor:
@ -1387,7 +1379,7 @@ class PipelineInput:
             # Send audio in the buffer first to speech-to-text, then move on to stt_stream.
             # This is basically an async itertools.chain.
             async def buffer_then_audio_stream() -> (
-                AsyncGenerator[ProcessedAudioChunk]
+                AsyncGenerator[EnhancedAudioChunk]
             ):
                 # Buffered audio
                 for chunk in stt_audio_buffer:
@ -1653,6 +1645,12 @@ class PipelineStorageCollectionWebsocket(
         if item_id is None:
             item_id = self.storage_collection.async_get_preferred_item()

+        if item_id.startswith("conversation.") and hass.states.get(item_id):
+            connection.send_result(
+                msg["id"], _async_get_pipeline_from_conversation_entity(hass, item_id)
+            )
+            return
+
         if item_id not in self.storage_collection.data:
             connection.send_error(
                 msg["id"],
@ -1671,7 +1669,7 @@ class PipelineStorageCollectionWebsocket(
         connection.send_result(
             msg["id"],
             {
-                "pipelines": self.storage_collection.async_items(),
+                "pipelines": async_get_pipelines(hass),
                 "preferred_pipeline": self.storage_collection.async_get_preferred_item(),
             },
         )
homeassistant/components/assist_pipeline/vad.py
@ -2,11 +2,13 @@

 from __future__ import annotations

-from abc import ABC, abstractmethod
-from collections.abc import Iterable
+from collections.abc import Callable, Iterable
 from dataclasses import dataclass
 from enum import StrEnum
-from typing import Final, cast
+import logging
+from typing import Final
+
+_LOGGER = logging.getLogger(__name__)

 _SAMPLE_RATE: Final = 16000  # Hz
 _SAMPLE_WIDTH: Final = 2  # bytes
@ -32,44 +34,6 @@ class VadSensitivity(StrEnum):
         return 1.0


-class VoiceActivityDetector(ABC):
-    """Base class for voice activity detectors (VAD)."""
-
-    @abstractmethod
-    def is_speech(self, chunk: bytes) -> bool:
-        """Return True if audio chunk contains speech."""
-
-    @property
-    @abstractmethod
-    def samples_per_chunk(self) -> int | None:
-        """Return number of samples per chunk or None if chunking is not required."""
-
-
-class WebRtcVad(VoiceActivityDetector):
-    """Voice activity detector based on webrtc."""
-
-    def __init__(self) -> None:
-        """Initialize webrtcvad."""
-        # Delay import of webrtc so HA start up is not crashing
-        # on older architectures (armhf).
-        #
-        # pylint: disable=import-outside-toplevel
-        from webrtc_noise_gain import AudioProcessor
-
-        # Just VAD: no noise suppression or auto gain
-        self._audio_processor = AudioProcessor(0, 0)
-
-    def is_speech(self, chunk: bytes) -> bool:
-        """Return True if audio chunk contains speech."""
-        result = self._audio_processor.Process10ms(chunk)
-        return cast(bool, result.is_speech)
-
-    @property
-    def samples_per_chunk(self) -> int | None:
-        """Return 10 ms."""
-        return int(0.01 * _SAMPLE_RATE)  # 10 ms
-
-
 class AudioBuffer:
     """Fixed-sized audio buffer with variable internal length."""

@ -116,7 +80,7 @@ class VoiceCommandSegmenter:
     speech_seconds: float = 0.3
     """Seconds of speech before voice command has started."""

-    silence_seconds: float = 0.5
+    silence_seconds: float = 1.0
     """Seconds of silence after voice command has ended."""

     timeout_seconds: float = 15.0
@ -159,6 +123,10 @@ class VoiceCommandSegmenter:
         """
         self._timeout_seconds_left -= chunk_seconds
         if self._timeout_seconds_left <= 0:
+            _LOGGER.warning(
+                "VAD end of speech detection timed out after %s seconds",
+                self.timeout_seconds,
+            )
             self.reset()
             return False

@ -169,29 +137,38 @@ class VoiceCommandSegmenter:
             if self._speech_seconds_left <= 0:
                 # Inside voice command
                 self.in_command = True
+                self._silence_seconds_left = self.silence_seconds
+                _LOGGER.debug("Voice command started")
             else:
                 # Reset if enough silence
                 self._reset_seconds_left -= chunk_seconds
                 if self._reset_seconds_left <= 0:
                     self._speech_seconds_left = self.speech_seconds
+                    self._reset_seconds_left = self.reset_seconds
         elif not is_speech:
+            # Silence in command
             self._reset_seconds_left = self.reset_seconds
             self._silence_seconds_left -= chunk_seconds
             if self._silence_seconds_left <= 0:
+                # Command finished successfully
                 self.reset()
+                _LOGGER.debug("Voice command finished")
                 return False
         else:
-            # Reset if enough speech
+            # Speech in command.
+            # Reset silence counter if enough speech.
             self._reset_seconds_left -= chunk_seconds
             if self._reset_seconds_left <= 0:
                 self._silence_seconds_left = self.silence_seconds
+                self._reset_seconds_left = self.reset_seconds

         return True

     def process_with_vad(
         self,
         chunk: bytes,
-        vad: VoiceActivityDetector,
+        vad_samples_per_chunk: int | None,
+        vad_is_speech: Callable[[bytes], bool],
         leftover_chunk_buffer: AudioBuffer | None,
     ) -> bool:
         """Process an audio chunk using an external VAD.
@ -200,20 +177,20 @@ class VoiceCommandSegmenter:

         Returns False when voice command is finished.
         """
-        if vad.samples_per_chunk is None:
+        if vad_samples_per_chunk is None:
             # No chunking
             chunk_seconds = (len(chunk) // _SAMPLE_WIDTH) / _SAMPLE_RATE
-            is_speech = vad.is_speech(chunk)
+            is_speech = vad_is_speech(chunk)
             return self.process(chunk_seconds, is_speech)

         if leftover_chunk_buffer is None:
             raise ValueError("leftover_chunk_buffer is required when vad uses chunking")

         # With chunking
-        seconds_per_chunk = vad.samples_per_chunk / _SAMPLE_RATE
-        bytes_per_chunk = vad.samples_per_chunk * _SAMPLE_WIDTH
+        seconds_per_chunk = vad_samples_per_chunk / _SAMPLE_RATE
+        bytes_per_chunk = vad_samples_per_chunk * _SAMPLE_WIDTH
         for vad_chunk in chunk_samples(chunk, bytes_per_chunk, leftover_chunk_buffer):
-            is_speech = vad.is_speech(vad_chunk)
+            is_speech = vad_is_speech(vad_chunk)
             if not self.process(seconds_per_chunk, is_speech):
                 return False

homeassistant/components/assist_pipeline/websocket_api.py
@ -5,13 +5,12 @@ import asyncio
 # Suppressing disable=deprecated-module is needed for Python 3.11
 import audioop  # pylint: disable=deprecated-module
 import base64
-from collections.abc import Callable
+from collections.abc import AsyncGenerator, Callable
 import contextlib
 import logging
 import math
 from typing import Any, Final

-from typing_extensions import AsyncGenerator
 import voluptuous as vol

 from homeassistant.components import conversation, stt, tts, websocket_api
@ -25,6 +24,9 @@ from .const import (
     DEFAULT_WAKE_WORD_TIMEOUT,
     DOMAIN,
     EVENT_RECORDING,
+    SAMPLE_CHANNELS,
+    SAMPLE_RATE,
+    SAMPLE_WIDTH,
 )
 from .error import PipelineNotFound
 from .pipeline import (
@ -93,7 +95,6 @@ def async_register_websocket_api(hass: HomeAssistant) -> None:
                     vol.Optional("volume_multiplier"): float,
                     # Advanced use cases/testing
                     vol.Optional("no_vad"): bool,
-                    vol.Optional("no_chunking"): bool,
                 }
             },
             extra=vol.ALLOW_EXTRA,
@ -171,9 +172,14 @@ async def websocket_run(

         # Yield until we receive an empty chunk
         while chunk := await audio_queue.get():
-            if incoming_sample_rate != 16000:
+            if incoming_sample_rate != SAMPLE_RATE:
                 chunk, state = audioop.ratecv(
-                    chunk, 2, 1, incoming_sample_rate, 16000, state
+                    chunk,
+                    SAMPLE_WIDTH,
+                    SAMPLE_CHANNELS,
+                    incoming_sample_rate,
+                    SAMPLE_RATE,
+                    state,
                 )
             yield chunk

@ -207,7 +213,6 @@ async def websocket_run(
             auto_gain_dbfs=msg_input.get("auto_gain_dbfs", 0),
             volume_multiplier=msg_input.get("volume_multiplier", 1.0),
             is_vad_enabled=not msg_input.get("no_vad", False),
-            is_chunking_enabled=not msg_input.get("no_chunking", False),
         )
     elif start_stage == PipelineStage.INTENT:
         # Input to conversation agent
@ -379,8 +384,8 @@ def websocket_get_run(
         vol.Required("type"): "assist_pipeline/language/list",
     }
 )
-@websocket_api.async_response
-async def websocket_list_languages(
+@callback
+def websocket_list_languages(
     hass: HomeAssistant,
     connection: websocket_api.connection.ActiveConnection,
     msg: dict[str, Any],
@ -390,7 +395,7 @@ async def websocket_list_languages(
     This will return a list of languages which are supported by at least one stt, tts
     and conversation engine respectively.
     """
-    conv_language_tags = await conversation.async_get_conversation_languages(hass)
+    conv_language_tags = conversation.async_get_conversation_languages(hass)
     stt_language_tags = stt.async_get_speech_to_text_languages(hass)
     tts_language_tags = tts.async_get_text_to_speech_languages(hass)
     pipeline_languages: set[str] | None = None
@ -425,9 +430,9 @@ async def websocket_list_languages(
     connection.send_result(
         msg["id"],
         {
-            "languages": sorted(pipeline_languages)
-            if pipeline_languages
-            else pipeline_languages
+            "languages": (
+                sorted(pipeline_languages) if pipeline_languages else pipeline_languages
+            )
         },
     )

homeassistant/components/august/binary_sensor.py
@ -8,7 +8,7 @@ from datetime import datetime, timedelta
 from functools import partial
 import logging

-from yalexs.activity import ACTION_DOORBELL_CALL_MISSED, Activity, ActivityType
+from yalexs.activity import Activity, ActivityType
 from yalexs.doorbell import DoorbellDetail
 from yalexs.lock import LockDetail, LockDoorStatus
 from yalexs.manager.const import ACTIVITY_UPDATE_INTERVAL
@ -26,67 +26,25 @@ from homeassistant.helpers.event import async_call_later

 from . import AugustConfigEntry, AugustData
 from .entity import AugustDescriptionEntity
+from .util import (
+    retrieve_ding_activity,
+    retrieve_doorbell_motion_activity,
+    retrieve_online_state,
+    retrieve_time_based_activity,
+)

 _LOGGER = logging.getLogger(__name__)

-TIME_TO_DECLARE_DETECTION = timedelta(seconds=ACTIVITY_UPDATE_INTERVAL.total_seconds())
 TIME_TO_RECHECK_DETECTION = timedelta(
     seconds=ACTIVITY_UPDATE_INTERVAL.total_seconds() * 3
 )


-def _retrieve_online_state(
-    data: AugustData, detail: DoorbellDetail | LockDetail
-) -> bool:
-    """Get the latest state of the sensor."""
-    # The doorbell will go into standby mode when there is no motion
-    # for a short while. It will wake by itself when needed so we need
-    # to consider is available or we will not report motion or dings
-    if isinstance(detail, DoorbellDetail):
-        return detail.is_online or detail.is_standby
-    return detail.bridge_is_online
-
-
-def _retrieve_time_based_state(
-    activities: set[ActivityType], data: AugustData, detail: DoorbellDetail
-) -> bool:
-    """Get the latest state of the sensor."""
-    stream = data.activity_stream
-    if latest := stream.get_latest_device_activity(detail.device_id, activities):
-        return _activity_time_based_state(latest)
-    return False
-
-
-_RING_ACTIVITIES = {ActivityType.DOORBELL_DING}
-
-
-def _retrieve_ding_state(data: AugustData, detail: DoorbellDetail | LockDetail) -> bool:
-    stream = data.activity_stream
-    latest = stream.get_latest_device_activity(detail.device_id, _RING_ACTIVITIES)
-    if latest is None or (
-        data.push_updates_connected and latest.action == ACTION_DOORBELL_CALL_MISSED
-    ):
-        return False
-    return _activity_time_based_state(latest)
-
-
-def _activity_time_based_state(latest: Activity) -> bool:
-    """Get the latest state of the sensor."""
-    start = latest.activity_start_time
-    end = latest.activity_end_time + TIME_TO_DECLARE_DETECTION
-    return start <= _native_datetime() <= end
-
-
-def _native_datetime() -> datetime:
-    """Return time in the format august uses without timezone."""
-    return datetime.now()
-
-
 @dataclass(frozen=True, kw_only=True)
 class AugustDoorbellBinarySensorEntityDescription(BinarySensorEntityDescription):
     """Describes August binary_sensor entity."""

-    value_fn: Callable[[AugustData, DoorbellDetail], bool]
+    value_fn: Callable[[AugustData, DoorbellDetail | LockDetail], Activity | None]
     is_time_based: bool


@ -99,14 +57,14 @@ SENSOR_TYPES_VIDEO_DOORBELL = (
     AugustDoorbellBinarySensorEntityDescription(
         key="motion",
         device_class=BinarySensorDeviceClass.MOTION,
-        value_fn=partial(_retrieve_time_based_state, {ActivityType.DOORBELL_MOTION}),
+        value_fn=retrieve_doorbell_motion_activity,
         is_time_based=True,
     ),
     AugustDoorbellBinarySensorEntityDescription(
         key="image capture",
         translation_key="image_capture",
         value_fn=partial(
-            _retrieve_time_based_state, {ActivityType.DOORBELL_IMAGE_CAPTURE}
+            retrieve_time_based_activity, {ActivityType.DOORBELL_IMAGE_CAPTURE}
         ),
         is_time_based=True,
     ),
@ -114,7 +72,7 @@ SENSOR_TYPES_VIDEO_DOORBELL = (
         key="online",
         device_class=BinarySensorDeviceClass.CONNECTIVITY,
         entity_category=EntityCategory.DIAGNOSTIC,
-        value_fn=_retrieve_online_state,
+        value_fn=retrieve_online_state,
         is_time_based=False,
     ),
 )
@ -123,8 +81,9 @@ SENSOR_TYPES_VIDEO_DOORBELL = (
 SENSOR_TYPES_DOORBELL: tuple[AugustDoorbellBinarySensorEntityDescription, ...] = (
     AugustDoorbellBinarySensorEntityDescription(
         key="ding",
+        translation_key="ding",
         device_class=BinarySensorDeviceClass.OCCUPANCY,
-        value_fn=_retrieve_ding_state,
+        value_fn=retrieve_ding_activity,
         is_time_based=True,
     ),
 )
@ -189,10 +148,12 @@ class AugustDoorbellBinarySensor(AugustDescriptionEntity, BinarySensorEntity):
     def _update_from_data(self) -> None:
         """Get the latest state of the sensor."""
         self._cancel_any_pending_updates()
-        self._attr_is_on = self.entity_description.value_fn(self._data, self._detail)
+        self._attr_is_on = bool(
+            self.entity_description.value_fn(self._data, self._detail)
+        )

         if self.entity_description.is_time_based:
-            self._attr_available = _retrieve_online_state(self._data, self._detail)
+            self._attr_available = retrieve_online_state(self._data, self._detail)
             self._schedule_update_to_recheck_turn_off_sensor()
         else:
             self._attr_available = True
homeassistant/components/august/const.py
@ -16,8 +16,6 @@ NOTIFICATION_TITLE = "August"

 MANUFACTURER = "August Home Inc."

-DEFAULT_AUGUST_CONFIG_FILE = ".august.conf"
-
 DEFAULT_NAME = "August"
 DOMAIN = "august"

@ -42,6 +40,7 @@ PLATFORMS = [
     Platform.BINARY_SENSOR,
     Platform.BUTTON,
     Platform.CAMERA,
+    Platform.EVENT,
     Platform.LOCK,
     Platform.SENSOR,
 ]
homeassistant/components/august/event.py (new file, 104 lines)
@ -0,0 +1,104 @@
+"""Support for august events."""
+
+from __future__ import annotations
+
+from collections.abc import Callable
+from dataclasses import dataclass
+from typing import TYPE_CHECKING
+
+from yalexs.activity import Activity
+from yalexs.doorbell import DoorbellDetail
+from yalexs.lock import LockDetail
+
+from homeassistant.components.event import (
+    EventDeviceClass,
+    EventEntity,
+    EventEntityDescription,
+)
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from . import AugustConfigEntry, AugustData
+from .entity import AugustDescriptionEntity
+from .util import (
+    retrieve_ding_activity,
+    retrieve_doorbell_motion_activity,
+    retrieve_online_state,
+)
+
+
+@dataclass(kw_only=True, frozen=True)
+class AugustEventEntityDescription(EventEntityDescription):
+    """Describe august event entities."""
+
+    value_fn: Callable[[AugustData, DoorbellDetail | LockDetail], Activity | None]
+
+
+TYPES_VIDEO_DOORBELL: tuple[AugustEventEntityDescription, ...] = (
+    AugustEventEntityDescription(
+        key="motion",
+        translation_key="motion",
+        device_class=EventDeviceClass.MOTION,
+        event_types=["motion"],
+        value_fn=retrieve_doorbell_motion_activity,
+    ),
+)
+
+
+TYPES_DOORBELL: tuple[AugustEventEntityDescription, ...] = (
+    AugustEventEntityDescription(
+        key="doorbell",
+        translation_key="doorbell",
+        device_class=EventDeviceClass.DOORBELL,
+        event_types=["ring"],
+        value_fn=retrieve_ding_activity,
+    ),
+)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: AugustConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up the august event platform."""
+    data = config_entry.runtime_data
+    entities: list[AugustEventEntity] = []
+
+    for lock in data.locks:
+        detail = data.get_device_detail(lock.device_id)
+        if detail.doorbell:
+            entities.extend(
+                AugustEventEntity(data, lock, description)
+                for description in TYPES_DOORBELL
+            )
+
+    for doorbell in data.doorbells:
+        entities.extend(
+            AugustEventEntity(data, doorbell, description)
+            for description in TYPES_DOORBELL + TYPES_VIDEO_DOORBELL
+        )
+
+    async_add_entities(entities)
+
+
+class AugustEventEntity(AugustDescriptionEntity, EventEntity):
+    """An august event entity."""
+
+    entity_description: AugustEventEntityDescription
+    _attr_has_entity_name = True
+    _last_activity: Activity | None = None
+
+    @callback
+    def _update_from_data(self) -> None:
+        """Update from data."""
+        self._attr_available = retrieve_online_state(self._data, self._detail)
+        current_activity = self.entity_description.value_fn(self._data, self._detail)
+        if not current_activity or current_activity == self._last_activity:
+            return
+        self._last_activity = current_activity
+        event_types = self.entity_description.event_types
+        if TYPE_CHECKING:
+            assert event_types is not None
+        self._trigger_event(event_type=event_types[0])
+        self.async_write_ha_state()
homeassistant/components/august/manifest.json
@ -28,5 +28,5 @@
   "documentation": "https://www.home-assistant.io/integrations/august",
   "iot_class": "cloud_push",
   "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==6.4.2", "yalexs-ble==2.4.3"]
+  "requirements": ["yalexs==6.4.3", "yalexs-ble==2.4.3"]
 }
homeassistant/components/august/strings.json
@ -40,6 +40,9 @@
     },
     "entity": {
       "binary_sensor": {
+        "ding": {
+          "name": "Doorbell ding"
+        },
         "image_capture": {
           "name": "Image capture"
         }
@ -58,6 +61,26 @@
       "operator": {
         "name": "Operator"
       }
+      },
+      "event": {
+        "doorbell": {
+          "state_attributes": {
+            "event_type": {
+              "state": {
+                "ring": "Ring"
+              }
+            }
+          }
+        },
+        "motion": {
+          "state_attributes": {
+            "event_type": {
+              "state": {
+                "motion": "Motion"
+              }
+            }
+          }
+        }
       }
     }
   }
@ -1,12 +1,24 @@
|
|||||||
"""August util functions."""
|
"""August util functions."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from functools import partial
|
||||||
import socket
|
import socket
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
|
from yalexs.activity import ACTION_DOORBELL_CALL_MISSED, Activity, ActivityType
|
||||||
|
from yalexs.doorbell import DoorbellDetail
|
||||||
|
from yalexs.lock import LockDetail
|
||||||
|
from yalexs.manager.const import ACTIVITY_UPDATE_INTERVAL
|
||||||
|
|
||||||
from homeassistant.core import HomeAssistant, callback
|
from homeassistant.core import HomeAssistant, callback
|
||||||
from homeassistant.helpers import aiohttp_client
|
from homeassistant.helpers import aiohttp_client
|
||||||
|
|
||||||
|
from . import AugustData
|
||||||
|
|
||||||
|
TIME_TO_DECLARE_DETECTION = timedelta(seconds=ACTIVITY_UPDATE_INTERVAL.total_seconds())
|
||||||
|
|
||||||
|
|
||||||
@callback
|
@callback
|
||||||
def async_create_august_clientsession(hass: HomeAssistant) -> aiohttp.ClientSession:
|
def async_create_august_clientsession(hass: HomeAssistant) -> aiohttp.ClientSession:
|
||||||
@@ -22,3 +34,60 @@ def async_create_august_clientsession(hass: HomeAssistant) -> aiohttp.ClientSess
     # we can allow IPv6 again
     #
     return aiohttp_client.async_create_clientsession(hass, family=socket.AF_INET)
+
+
+def retrieve_time_based_activity(
+    activities: set[ActivityType], data: AugustData, detail: DoorbellDetail | LockDetail
+) -> Activity | None:
+    """Get the latest state of the sensor."""
+    stream = data.activity_stream
+    if latest := stream.get_latest_device_activity(detail.device_id, activities):
+        return _activity_time_based(latest)
+    return False
+
+
+_RING_ACTIVITIES = {ActivityType.DOORBELL_DING}
+
+
+def retrieve_ding_activity(
+    data: AugustData, detail: DoorbellDetail | LockDetail
+) -> Activity | None:
+    """Get the ring/ding state."""
+    stream = data.activity_stream
+    latest = stream.get_latest_device_activity(detail.device_id, _RING_ACTIVITIES)
+    if latest is None or (
+        data.push_updates_connected and latest.action == ACTION_DOORBELL_CALL_MISSED
+    ):
+        return None
+    return _activity_time_based(latest)
+
+
+retrieve_doorbell_motion_activity = partial(
+    retrieve_time_based_activity, {ActivityType.DOORBELL_MOTION}
+)
+
+
+def _activity_time_based(latest: Activity) -> Activity | None:
+    """Get the latest state of the sensor."""
+    start = latest.activity_start_time
+    end = latest.activity_end_time + TIME_TO_DECLARE_DETECTION
+    if start <= _native_datetime() <= end:
+        return latest
+    return None
+
+
+def _native_datetime() -> datetime:
+    """Return time in the format august uses without timezone."""
+    return datetime.now()
+
+
+def retrieve_online_state(
+    data: AugustData, detail: DoorbellDetail | LockDetail
+) -> bool:
+    """Get the latest state of the sensor."""
+    # The doorbell will go into standby mode when there is no motion
+    # for a short while. It will wake by itself when needed, so we need
+    # to consider it available or we will not report motion or dings.
+    if isinstance(detail, DoorbellDetail):
+        return detail.is_online or detail.is_standby
+    return detail.bridge_is_online
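
The helpers above only report a ding or motion as active while the current time falls inside the activity window padded by TIME_TO_DECLARE_DETECTION. A standalone sketch of that window check follows, assuming a fixed 10-second padding in place of the yalexs ACTIVITY_UPDATE_INTERVAL.

# Standalone sketch of the padded time-window check in _activity_time_based.
# The 10-second padding is an assumed stand-in for ACTIVITY_UPDATE_INTERVAL.
from datetime import datetime, timedelta

TIME_TO_DECLARE_DETECTION = timedelta(seconds=10)


def is_detection_active(start: datetime, end: datetime, now: datetime | None = None) -> bool:
    """Return True while `now` lies between start and the padded end time."""
    now = now or datetime.now()
    return start <= now <= end + TIME_TO_DECLARE_DETECTION


if __name__ == "__main__":
    started = datetime.now() - timedelta(seconds=8)
    ended = datetime.now() - timedelta(seconds=3)
    # Still inside the padded window, so the detection is reported as active.
    print(is_detection_active(started, ended))  # True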

homeassistant/components/autarco/__init__.py (new file)
@@ -0,0 +1,49 @@
"""The Autarco integration."""

from __future__ import annotations

import asyncio

from autarco import Autarco

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .coordinator import AutarcoDataUpdateCoordinator

PLATFORMS: list[Platform] = [Platform.SENSOR]

type AutarcoConfigEntry = ConfigEntry[list[AutarcoDataUpdateCoordinator]]


async def async_setup_entry(hass: HomeAssistant, entry: AutarcoConfigEntry) -> bool:
    """Set up Autarco from a config entry."""
    client = Autarco(
        email=entry.data[CONF_EMAIL],
        password=entry.data[CONF_PASSWORD],
        session=async_get_clientsession(hass),
    )
    account_sites = await client.get_account()

    coordinators: list[AutarcoDataUpdateCoordinator] = [
        AutarcoDataUpdateCoordinator(hass, client, site) for site in account_sites
    ]

    await asyncio.gather(
        *[
            coordinator.async_config_entry_first_refresh()
            for coordinator in coordinators
        ]
    )

    entry.runtime_data = coordinators

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: AutarcoConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
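
async_setup_entry builds one coordinator per account site and performs all first refreshes concurrently with asyncio.gather before forwarding the platforms. A reduced sketch of that pattern, with plain coroutines standing in for the coordinators (names below are illustrative only):

# Reduced sketch of the concurrent first-refresh pattern in async_setup_entry,
# with plain coroutines standing in for AutarcoDataUpdateCoordinator instances.
import asyncio


async def first_refresh(site_name: str) -> str:
    await asyncio.sleep(0.1)  # stand-in for the first API poll of one site
    return f"{site_name}: refreshed"


async def main() -> None:
    sites = ["site-a", "site-b", "site-c"]
    # All sites are refreshed at the same time rather than one after another.
    results = await asyncio.gather(*(first_refresh(site) for site in sites))
    print(results)


asyncio.run(main())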

homeassistant/components/autarco/config_flow.py (new file)
@@ -0,0 +1,57 @@
"""Config flow for Autarco integration."""

from __future__ import annotations

from typing import Any

from autarco import Autarco, AutarcoAuthenticationError, AutarcoConnectionError
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN

DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_EMAIL): str,
        vol.Required(CONF_PASSWORD): str,
    }
)


class AutarcoConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Autarco."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]})
            client = Autarco(
                email=user_input[CONF_EMAIL],
                password=user_input[CONF_PASSWORD],
                session=async_get_clientsession(self.hass),
            )
            try:
                await client.get_account()
            except AutarcoAuthenticationError:
                errors["base"] = "invalid_auth"
            except AutarcoConnectionError:
                errors["base"] = "cannot_connect"
            else:
                return self.async_create_entry(
                    title=user_input[CONF_EMAIL],
                    data={
                        CONF_EMAIL: user_input[CONF_EMAIL],
                        CONF_PASSWORD: user_input[CONF_PASSWORD],
                    },
                )
        return self.async_show_form(
            step_id="user",
            errors=errors,
            data_schema=DATA_SCHEMA,
        )
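
The flow above maps library exceptions to the error keys defined in strings.json and only creates the entry when validation succeeds. A small sketch of that mapping, using stand-in exception classes so it runs without the autarco package:

# Sketch of the exception-to-error-key mapping in async_step_user, with
# stand-in exception classes so the example runs without the autarco package.
class AutarcoAuthenticationError(Exception):
    """Stand-in for autarco.AutarcoAuthenticationError."""


class AutarcoConnectionError(Exception):
    """Stand-in for autarco.AutarcoConnectionError."""


def map_error(exc: Exception | None) -> dict[str, str]:
    """Return the `errors` dict the form would be re-shown with."""
    if isinstance(exc, AutarcoAuthenticationError):
        return {"base": "invalid_auth"}
    if isinstance(exc, AutarcoConnectionError):
        return {"base": "cannot_connect"}
    return {}


print(map_error(AutarcoConnectionError()))      # {'base': 'cannot_connect'}
print(map_error(AutarcoAuthenticationError()))  # {'base': 'invalid_auth'}
print(map_error(None))                          # {} -> the entry would be created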

homeassistant/components/autarco/const.py (new file)
@@ -0,0 +1,11 @@
"""Constants for the Autarco integration."""

from __future__ import annotations

from datetime import timedelta
import logging
from typing import Final

DOMAIN: Final = "autarco"
LOGGER = logging.getLogger(__package__)
SCAN_INTERVAL = timedelta(minutes=5)

homeassistant/components/autarco/coordinator.py (new file)
@@ -0,0 +1,49 @@
"""Coordinator for Autarco integration."""

from __future__ import annotations

from typing import NamedTuple

from autarco import AccountSite, Autarco, Inverter, Solar

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN, LOGGER, SCAN_INTERVAL


class AutarcoData(NamedTuple):
    """Class for defining data in dict."""

    solar: Solar
    inverters: dict[str, Inverter]


class AutarcoDataUpdateCoordinator(DataUpdateCoordinator[AutarcoData]):
    """Class to manage fetching Autarco data from the API."""

    config_entry: ConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        client: Autarco,
        site: AccountSite,
    ) -> None:
        """Initialize global Autarco data updater."""
        super().__init__(
            hass,
            LOGGER,
            name=DOMAIN,
            update_interval=SCAN_INTERVAL,
        )
        self.client = client
        self.site = site

    async def _async_update_data(self) -> AutarcoData:
        """Fetch data from Autarco API."""
        return AutarcoData(
            solar=await self.client.get_solar(self.site.public_key),
            inverters=await self.client.get_inverters(self.site.public_key),
        )
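
Each update returns an AutarcoData tuple, and everything downstream reads attributes off coordinator.data. A sketch of that access pattern with simplified stand-ins for the autarco Solar and Inverter models (shapes assumed from the fields used elsewhere in this diff):

# Illustrative sketch of how downstream code reads coordinator.data, using
# plain dataclasses in place of the autarco Solar/Inverter models (assumed shapes).
from dataclasses import dataclass
from typing import NamedTuple


@dataclass
class Solar:
    power_production: int


@dataclass
class Inverter:
    out_ac_power: int


class AutarcoData(NamedTuple):
    solar: Solar
    inverters: dict[str, Inverter]


data = AutarcoData(
    solar=Solar(power_production=1250),
    inverters={"INV-001": Inverter(out_ac_power=620)},
)
# The sensor entities below read attributes off this tuple, for example:
print(data.solar.power_production)             # 1250
print(data.inverters["INV-001"].out_ac_power)  # 620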

homeassistant/components/autarco/diagnostics.py (new file)
@@ -0,0 +1,43 @@
"""Support for the Autarco diagnostics."""

from __future__ import annotations

from typing import Any

from homeassistant.core import HomeAssistant

from . import AutarcoConfigEntry, AutarcoDataUpdateCoordinator


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, config_entry: AutarcoConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    autarco_data: list[AutarcoDataUpdateCoordinator] = config_entry.runtime_data

    return {
        "sites_data": [
            {
                "id": coordinator.site.site_id,
                "name": coordinator.site.system_name,
                "health": coordinator.site.health,
                "solar": {
                    "power_production": coordinator.data.solar.power_production,
                    "energy_production_today": coordinator.data.solar.energy_production_today,
                    "energy_production_month": coordinator.data.solar.energy_production_month,
                    "energy_production_total": coordinator.data.solar.energy_production_total,
                },
                "inverters": [
                    {
                        "serial_number": inverter.serial_number,
                        "out_ac_power": inverter.out_ac_power,
                        "out_ac_energy_total": inverter.out_ac_energy_total,
                        "grid_turned_off": inverter.grid_turned_off,
                        "health": inverter.health,
                    }
                    for inverter in coordinator.data.inverters.values()
                ],
            }
            for coordinator in autarco_data
        ],
    }
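
For reference, the diagnostics payload built above takes roughly the following shape; every value below is invented for illustration.

# Illustrative shape of the diagnostics payload; all values are made up.
EXAMPLE_DIAGNOSTICS = {
    "sites_data": [
        {
            "id": 1,
            "name": "My Solar Installation",
            "health": "OK",
            "solar": {
                "power_production": 1250,
                "energy_production_today": 4,
                "energy_production_month": 120,
                "energy_production_total": 3600,
            },
            "inverters": [
                {
                    "serial_number": "INV-001",
                    "out_ac_power": 620,
                    "out_ac_energy_total": 1800,
                    "grid_turned_off": False,
                    "health": "OK",
                }
            ],
        }
    ]
}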

homeassistant/components/autarco/manifest.json (new file)
@@ -0,0 +1,9 @@
{
  "domain": "autarco",
  "name": "Autarco",
  "codeowners": ["@klaasnicolaas"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/autarco",
  "iot_class": "cloud_polling",
  "requirements": ["autarco==2.0.0"]
}

homeassistant/components/autarco/sensor.py (new file)
@@ -0,0 +1,189 @@
"""Support for Autarco sensors."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass

from autarco import Inverter, Solar

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import UnitOfEnergy, UnitOfPower
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import AutarcoConfigEntry
from .const import DOMAIN
from .coordinator import AutarcoDataUpdateCoordinator


@dataclass(frozen=True, kw_only=True)
class AutarcoSolarSensorEntityDescription(SensorEntityDescription):
    """Describes an Autarco sensor entity."""

    value_fn: Callable[[Solar], StateType]


SENSORS_SOLAR: tuple[AutarcoSolarSensorEntityDescription, ...] = (
    AutarcoSolarSensorEntityDescription(
        key="power_production",
        translation_key="power_production",
        native_unit_of_measurement=UnitOfPower.WATT,
        device_class=SensorDeviceClass.POWER,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda solar: solar.power_production,
    ),
    AutarcoSolarSensorEntityDescription(
        key="energy_production_today",
        translation_key="energy_production_today",
        native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        device_class=SensorDeviceClass.ENERGY,
        value_fn=lambda solar: solar.energy_production_today,
    ),
    AutarcoSolarSensorEntityDescription(
        key="energy_production_month",
        translation_key="energy_production_month",
        native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        device_class=SensorDeviceClass.ENERGY,
        value_fn=lambda solar: solar.energy_production_month,
    ),
    AutarcoSolarSensorEntityDescription(
        key="energy_production_total",
        translation_key="energy_production_total",
        native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        device_class=SensorDeviceClass.ENERGY,
        state_class=SensorStateClass.TOTAL_INCREASING,
        value_fn=lambda solar: solar.energy_production_total,
    ),
)


@dataclass(frozen=True, kw_only=True)
class AutarcoInverterSensorEntityDescription(SensorEntityDescription):
    """Describes an Autarco inverter sensor entity."""

    value_fn: Callable[[Inverter], StateType]


SENSORS_INVERTER: tuple[AutarcoInverterSensorEntityDescription, ...] = (
    AutarcoInverterSensorEntityDescription(
        key="out_ac_power",
        translation_key="out_ac_power",
        native_unit_of_measurement=UnitOfPower.WATT,
        device_class=SensorDeviceClass.POWER,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda inverter: inverter.out_ac_power,
    ),
    AutarcoInverterSensorEntityDescription(
        key="out_ac_energy_total",
        translation_key="out_ac_energy_total",
        native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        device_class=SensorDeviceClass.ENERGY,
        state_class=SensorStateClass.TOTAL_INCREASING,
        value_fn=lambda inverter: inverter.out_ac_energy_total,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AutarcoConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up Autarco sensors based on a config entry."""
    entities: list[SensorEntity] = []
    for coordinator in entry.runtime_data:
        entities.extend(
            AutarcoSolarSensorEntity(
                coordinator=coordinator,
                description=description,
            )
            for description in SENSORS_SOLAR
        )
        entities.extend(
            AutarcoInverterSensorEntity(
                coordinator=coordinator,
                description=description,
                serial_number=inverter,
            )
            for description in SENSORS_INVERTER
            for inverter in coordinator.data.inverters
        )
    async_add_entities(entities)


class AutarcoSolarSensorEntity(
    CoordinatorEntity[AutarcoDataUpdateCoordinator], SensorEntity
):
    """Defines an Autarco solar sensor."""

    entity_description: AutarcoSolarSensorEntityDescription
    _attr_has_entity_name = True

    def __init__(
        self,
        *,
        coordinator: AutarcoDataUpdateCoordinator,
        description: AutarcoSolarSensorEntityDescription,
    ) -> None:
        """Initialize Autarco sensor."""
        super().__init__(coordinator)

        self.entity_description = description
        self._attr_unique_id = f"{coordinator.site.site_id}_solar_{description.key}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, f"{coordinator.site.site_id}_solar")},
            entry_type=DeviceEntryType.SERVICE,
            manufacturer="Autarco",
            name="Solar",
        )

    @property
    def native_value(self) -> StateType:
        """Return the state of the sensor."""
        return self.entity_description.value_fn(self.coordinator.data.solar)


class AutarcoInverterSensorEntity(
    CoordinatorEntity[AutarcoDataUpdateCoordinator], SensorEntity
):
    """Defines an Autarco inverter sensor."""

    entity_description: AutarcoInverterSensorEntityDescription
    _attr_has_entity_name = True

    def __init__(
        self,
        *,
        coordinator: AutarcoDataUpdateCoordinator,
        description: AutarcoInverterSensorEntityDescription,
        serial_number: str,
    ) -> None:
        """Initialize Autarco sensor."""
        super().__init__(coordinator)

        self.entity_description = description
        self._serial_number = serial_number
        self._attr_unique_id = f"{serial_number}_{description.key}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, serial_number)},
            name=f"Inverter {serial_number}",
            manufacturer="Autarco",
            model="Inverter",
            serial_number=serial_number,
        )

    @property
    def native_value(self) -> StateType:
        """Return the state of the sensor."""
        return self.entity_description.value_fn(
            self.coordinator.data.inverters[self._serial_number]
        )
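
Both entity classes above delegate native_value to the value_fn stored on their entity description, so adding a sensor is just another description entry. A standalone sketch of that lookup pattern, with a plain dataclass standing in for SensorEntityDescription:

# Standalone sketch of the description/value_fn pattern the sensors above use,
# with a plain dataclass standing in for SensorEntityDescription (assumed).
from collections.abc import Callable
from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class Description:
    key: str
    value_fn: Callable[[dict], float]


DESCRIPTIONS = (
    Description(key="power_production", value_fn=lambda solar: solar["power_production"]),
    Description(key="energy_production_today", value_fn=lambda solar: solar["energy_production_today"]),
)

solar_data = {"power_production": 1250, "energy_production_today": 4}
for description in DESCRIPTIONS:
    # native_value in each entity simply delegates to its description's value_fn.
    print(description.key, "->", description.value_fn(solar_data))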

homeassistant/components/autarco/strings.json (new file)
@@ -0,0 +1,46 @@
{
  "config": {
    "step": {
      "user": {
        "description": "Connect to your Autarco account to get information about your solar panels.",
        "data": {
          "email": "[%key:common::config_flow::data::email%]",
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "email": "The email address of your Autarco account.",
          "password": "The password of your Autarco account."
        }
      }
    },
    "error": {
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    }
  },
  "entity": {
    "sensor": {
      "power_production": {
        "name": "Power production"
      },
      "energy_production_today": {
        "name": "Energy production today"
      },
      "energy_production_month": {
        "name": "Energy production month"
      },
      "energy_production_total": {
        "name": "Energy production total"
      },
      "out_ac_power": {
        "name": "Power AC output"
      },
      "out_ac_energy_total": {
        "name": "Energy AC output total"
      }
    }
  }
}

Some files were not shown because too many files have changed in this diff.