Mirror of https://github.com/home-assistant/core.git (synced 2025-07-22 04:37:06 +00:00)

2024.3.0 (#112516)
Commit 1aa5a07501

.coveragerc (34 changed lines)
@@ -73,6 +73,10 @@ omit =
     homeassistant/components/apple_tv/browse_media.py
     homeassistant/components/apple_tv/media_player.py
     homeassistant/components/apple_tv/remote.py
+    homeassistant/components/aprilaire/__init__.py
+    homeassistant/components/aprilaire/climate.py
+    homeassistant/components/aprilaire/coordinator.py
+    homeassistant/components/aprilaire/entity.py
     homeassistant/components/aqualogic/*
     homeassistant/components/aquostv/media_player.py
     homeassistant/components/arcam_fmj/__init__.py
@@ -89,7 +93,7 @@ omit =
     homeassistant/components/aseko_pool_live/entity.py
     homeassistant/components/aseko_pool_live/sensor.py
     homeassistant/components/asterisk_cdr/mailbox.py
-    homeassistant/components/asterisk_mbox/*
+    homeassistant/components/asterisk_mbox/mailbox.py
     homeassistant/components/aten_pe/*
     homeassistant/components/atome/*
     homeassistant/components/aurora/__init__.py
@@ -188,6 +192,7 @@ omit =
     homeassistant/components/comelit/const.py
     homeassistant/components/comelit/cover.py
     homeassistant/components/comelit/coordinator.py
+    homeassistant/components/comelit/humidifier.py
     homeassistant/components/comelit/light.py
     homeassistant/components/comelit/sensor.py
     homeassistant/components/comelit/switch.py
@@ -359,7 +364,6 @@ omit =
     homeassistant/components/escea/__init__.py
     homeassistant/components/escea/climate.py
     homeassistant/components/escea/discovery.py
-    homeassistant/components/esphome/manager.py
     homeassistant/components/etherscan/sensor.py
     homeassistant/components/eufy/*
     homeassistant/components/eufylife_ble/__init__.py
@@ -555,6 +559,7 @@ omit =
     homeassistant/components/hunterdouglas_powerview/coordinator.py
     homeassistant/components/hunterdouglas_powerview/cover.py
     homeassistant/components/hunterdouglas_powerview/entity.py
+    homeassistant/components/hunterdouglas_powerview/number.py
     homeassistant/components/hunterdouglas_powerview/select.py
     homeassistant/components/hunterdouglas_powerview/sensor.py
     homeassistant/components/hunterdouglas_powerview/shade_data.py
@@ -634,12 +639,6 @@ omit =
     homeassistant/components/izone/climate.py
     homeassistant/components/izone/discovery.py
     homeassistant/components/joaoapps_join/*
-    homeassistant/components/juicenet/__init__.py
-    homeassistant/components/juicenet/device.py
-    homeassistant/components/juicenet/entity.py
-    homeassistant/components/juicenet/number.py
-    homeassistant/components/juicenet/sensor.py
-    homeassistant/components/juicenet/switch.py
     homeassistant/components/justnimbus/coordinator.py
     homeassistant/components/justnimbus/entity.py
     homeassistant/components/justnimbus/sensor.py
@@ -765,6 +764,16 @@ omit =
     homeassistant/components/meteoclimatic/__init__.py
     homeassistant/components/meteoclimatic/sensor.py
     homeassistant/components/meteoclimatic/weather.py
+    homeassistant/components/microbees/__init__.py
+    homeassistant/components/microbees/api.py
+    homeassistant/components/microbees/application_credentials.py
+    homeassistant/components/microbees/button.py
+    homeassistant/components/microbees/const.py
+    homeassistant/components/microbees/coordinator.py
+    homeassistant/components/microbees/entity.py
+    homeassistant/components/microbees/light.py
+    homeassistant/components/microbees/sensor.py
+    homeassistant/components/microbees/switch.py
     homeassistant/components/microsoft/tts.py
     homeassistant/components/mikrotik/hub.py
     homeassistant/components/mill/climate.py
@@ -874,6 +883,7 @@ omit =
     homeassistant/components/notion/__init__.py
     homeassistant/components/notion/binary_sensor.py
     homeassistant/components/notion/sensor.py
+    homeassistant/components/notion/util.py
     homeassistant/components/nsw_fuel_station/sensor.py
     homeassistant/components/nuki/__init__.py
     homeassistant/components/nuki/binary_sensor.py
@@ -1067,6 +1077,7 @@ omit =
     homeassistant/components/renson/sensor.py
     homeassistant/components/renson/button.py
     homeassistant/components/renson/fan.py
+    homeassistant/components/renson/switch.py
     homeassistant/components/renson/binary_sensor.py
     homeassistant/components/renson/number.py
     homeassistant/components/renson/time.py
@@ -1535,6 +1546,7 @@ omit =
     homeassistant/components/vicare/entity.py
     homeassistant/components/vicare/number.py
     homeassistant/components/vicare/sensor.py
+    homeassistant/components/vicare/types.py
     homeassistant/components/vicare/utils.py
     homeassistant/components/vicare/water_heater.py
     homeassistant/components/vilfo/__init__.py
@@ -1572,6 +1584,11 @@ omit =
     homeassistant/components/weatherflow/__init__.py
     homeassistant/components/weatherflow/const.py
     homeassistant/components/weatherflow/sensor.py
+    homeassistant/components/weatherflow_cloud/__init__.py
+    homeassistant/components/weatherflow_cloud/const.py
+    homeassistant/components/weatherflow_cloud/coordinator.py
+    homeassistant/components/weatherflow_cloud/weather.py
+    homeassistant/components/webmin/sensor.py
     homeassistant/components/wiffi/__init__.py
     homeassistant/components/wiffi/binary_sensor.py
     homeassistant/components/wiffi/sensor.py
@@ -1695,6 +1712,7 @@ omit =
     homeassistant/components/myuplink/application_credentials.py
     homeassistant/components/myuplink/coordinator.py
     homeassistant/components/myuplink/entity.py
+    homeassistant/components/myuplink/helpers.py
     homeassistant/components/myuplink/sensor.py
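For readers unfamiliar with this file: each entry under `omit =` is a coverage.py pattern for code that is excluded from coverage measurement, which is how untested integration modules (for example the aprilaire, microbees and weatherflow_cloud files added above) are kept out of the coverage report. As a hedged illustration of the same mechanism through the coverage.py API — the two patterns below are condensed stand-ins, not the literal entries from this commit:

# Illustration only: omit patterns exclude matching files from measurement.
import coverage

cov = coverage.Coverage(
    omit=[
        "homeassistant/components/aprilaire/*",
        "homeassistant/components/microbees/*",
    ]
)
cov.start()
total = sum(range(10))  # anything executed here is measured, except omitted files
cov.stop()
cov.report()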
.github/workflows/builder.yml (6 changed lines)

@@ -103,7 +103,7 @@ jobs:
 
       - name: Download nightly wheels of frontend
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v3.0.0
+        uses: dawidd6/action-download-artifact@v3.1.2
         with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/frontend
@@ -114,7 +114,7 @@ jobs:
 
       - name: Download nightly wheels of intents
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v3.0.0
+        uses: dawidd6/action-download-artifact@v3.1.2
         with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/intents-package
@@ -341,7 +341,7 @@ jobs:
        uses: actions/checkout@v4.1.1
 
      - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.3.0
+        uses: sigstore/cosign-installer@v3.4.0
        with:
          cosign-release: "v2.0.2"
 
.github/workflows/ci.yaml (25 changed lines)

@@ -36,7 +36,7 @@ env:
   CACHE_VERSION: 5
   PIP_CACHE_VERSION: 4
   MYPY_CACHE_VERSION: 7
-  HA_SHORT_VERSION: "2024.2"
+  HA_SHORT_VERSION: "2024.3"
   DEFAULT_PYTHON: "3.11"
   ALL_PYTHON_VERSIONS: "['3.11', '3.12']"
   # 10.3 is the oldest supported version
@@ -103,7 +103,7 @@ jobs:
          echo "key=pre-commit-${{ env.CACHE_VERSION }}-${{
            hashFiles('.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
      - name: Filter for core changes
-        uses: dorny/paths-filter@v3.0.0
+        uses: dorny/paths-filter@v3.0.1
        id: core
        with:
          filters: .core_files.yaml
@@ -118,7 +118,7 @@ jobs:
          echo "Result:"
          cat .integration_paths.yaml
      - name: Filter for integration changes
-        uses: dorny/paths-filter@v3.0.0
+        uses: dorny/paths-filter@v3.0.1
        id: integrations
        with:
          filters: .integration_paths.yaml
@@ -803,10 +803,11 @@ jobs:
          path: pytest-*.txt
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v3.1.2
+        uses: actions/upload-artifact@v4.3.1
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
+          overwrite: true
      - name: Check dirty
        run: |
          ./script/check_dirty
@@ -928,11 +929,12 @@ jobs:
          path: pytest-*.txt
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v3.1.2
+        uses: actions/upload-artifact@v4.3.1
        with:
-          name: coverage-${{ matrix.python-version }}-mariadb-${{
+          name: coverage-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
          path: coverage.xml
+          overwrite: true
      - name: Check dirty
        run: |
          ./script/check_dirty
@@ -1055,11 +1057,12 @@ jobs:
          path: pytest-*.txt
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v3.1.0
+        uses: actions/upload-artifact@v4.3.1
        with:
          name: coverage-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
          path: coverage.xml
+          overwrite: true
      - name: Check dirty
        run: |
          ./script/check_dirty
@@ -1076,10 +1079,12 @@ jobs:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.1
      - name: Download all coverage artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4.1.3
+        with:
+          pattern: coverage-*
      - name: Upload coverage to Codecov (full coverage)
        if: needs.info.outputs.test_full_suite == 'true'
-        uses: Wandalen/wretry.action@v1.3.0
+        uses: Wandalen/wretry.action@v1.4.4
        with:
          action: codecov/codecov-action@v3.1.3
          with: |
@@ -1090,7 +1095,7 @@ jobs:
          attempt_delay: 30000
      - name: Upload coverage to Codecov (partial coverage)
        if: needs.info.outputs.test_full_suite == 'false'
-        uses: Wandalen/wretry.action@v1.3.0
+        uses: Wandalen/wretry.action@v1.4.4
        with:
          action: codecov/codecov-action@v3.1.3
          with: |
.github/workflows/codeql.yml (9 changed lines)

@@ -2,11 +2,6 @@ name: "CodeQL"
 
 # yamllint disable-line rule:truthy
 on:
-  push:
-    branches:
-      - dev
-      - rc
-      - master
   schedule:
     - cron: "30 18 * * 4"
 
@@ -29,11 +24,11 @@ jobs:
        uses: actions/checkout@v4.1.1
 
      - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.23.2
+        uses: github/codeql-action/init@v3.24.5
        with:
          languages: python
 
      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.23.2
+        uses: github/codeql-action/analyze@v3.24.5
        with:
          category: "/language:python"
.github/workflows/wheels.yml (18 changed lines)

@@ -63,16 +63,18 @@ jobs:
          ) > .env_file
 
      - name: Upload env_file
-        uses: actions/upload-artifact@v3.1.2
+        uses: actions/upload-artifact@v4.3.1
        with:
          name: env_file
          path: ./.env_file
+          overwrite: true
 
      - name: Upload requirements_diff
-        uses: actions/upload-artifact@v3.1.2
+        uses: actions/upload-artifact@v4.3.1
        with:
          name: requirements_diff
          path: ./requirements_diff.txt
+          overwrite: true
 
   core:
     name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2)
@@ -82,19 +84,19 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        abi: ["cp311", "cp312"]
+        abi: ["cp312"]
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
      - name: Checkout the repository
        uses: actions/checkout@v4.1.1
 
      - name: Download env_file
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4.1.3
        with:
          name: env_file
 
      - name: Download requirements_diff
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4.1.3
        with:
          name: requirements_diff
 
@@ -120,19 +122,19 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        abi: ["cp311", "cp312"]
+        abi: ["cp312"]
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
      - name: Checkout the repository
        uses: actions/checkout@v4.1.1
 
      - name: Download env_file
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4.1.3
        with:
          name: env_file
 
      - name: Download requirements_diff
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4.1.3
        with:
          name: requirements_diff
 
(file header missing from this capture)

@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.1.8
+    rev: v0.2.1
     hooks:
       - id: ruff
         args:
(file header missing from this capture)

@@ -80,6 +80,7 @@ homeassistant.components.anthemav.*
 homeassistant.components.apache_kafka.*
 homeassistant.components.apcupsd.*
 homeassistant.components.api.*
+homeassistant.components.apple_tv.*
 homeassistant.components.apprise.*
 homeassistant.components.aprs.*
 homeassistant.components.aqualogic.*
CODEOWNERS (43 changed lines)

@@ -104,6 +104,8 @@ build.json @home-assistant/supervisor
 /tests/components/application_credentials/ @home-assistant/core
 /homeassistant/components/apprise/ @caronc
 /tests/components/apprise/ @caronc
+/homeassistant/components/aprilaire/ @chamberlain2007
+/tests/components/aprilaire/ @chamberlain2007
 /homeassistant/components/aprs/ @PhilRW
 /tests/components/aprs/ @PhilRW
 /homeassistant/components/aranet/ @aschmitz @thecode
@@ -157,8 +159,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/binary_sensor/ @home-assistant/core
 /tests/components/binary_sensor/ @home-assistant/core
 /homeassistant/components/bizkaibus/ @UgaitzEtxebarria
-/homeassistant/components/blebox/ @bbx-a @riokuu
-/tests/components/blebox/ @bbx-a @riokuu
+/homeassistant/components/blebox/ @bbx-a @riokuu @swistakm
+/tests/components/blebox/ @bbx-a @riokuu @swistakm
 /homeassistant/components/blink/ @fronzbot @mkmer
 /tests/components/blink/ @fronzbot @mkmer
 /homeassistant/components/blue_current/ @Floris272 @gleeuwen
@@ -329,8 +331,8 @@ build.json @home-assistant/supervisor
 /tests/components/ecoforest/ @pjanuario
 /homeassistant/components/econet/ @w1ll1am23
 /tests/components/econet/ @w1ll1am23
-/homeassistant/components/ecovacs/ @OverloadUT @mib1185 @edenhaus
-/tests/components/ecovacs/ @OverloadUT @mib1185 @edenhaus
+/homeassistant/components/ecovacs/ @OverloadUT @mib1185 @edenhaus @Augar
+/tests/components/ecovacs/ @OverloadUT @mib1185 @edenhaus @Augar
 /homeassistant/components/ecowitt/ @pvizeli
 /tests/components/ecowitt/ @pvizeli
 /homeassistant/components/efergy/ @tkdrob
@@ -584,6 +586,8 @@ build.json @home-assistant/supervisor
 /tests/components/humidifier/ @home-assistant/core @Shulyaka
 /homeassistant/components/hunterdouglas_powerview/ @bdraco @kingy444 @trullock
 /tests/components/hunterdouglas_powerview/ @bdraco @kingy444 @trullock
+/homeassistant/components/husqvarna_automower/ @Thomas55555
+/tests/components/husqvarna_automower/ @Thomas55555
 /homeassistant/components/huum/ @frwickst
 /tests/components/huum/ @frwickst
 /homeassistant/components/hvv_departures/ @vigonotion
@@ -665,8 +669,6 @@ build.json @home-assistant/supervisor
 /tests/components/jellyfin/ @j-stienstra @ctalkington
 /homeassistant/components/jewish_calendar/ @tsvi
 /tests/components/jewish_calendar/ @tsvi
-/homeassistant/components/juicenet/ @jesserockz
-/tests/components/juicenet/ @jesserockz
 /homeassistant/components/justnimbus/ @kvanzuijlen
 /tests/components/justnimbus/ @kvanzuijlen
 /homeassistant/components/jvc_projector/ @SteveEasley @msavazzi
@@ -766,8 +768,8 @@ build.json @home-assistant/supervisor
 /tests/components/lupusec/ @majuss @suaveolent
 /homeassistant/components/lutron/ @cdheiser @wilburCForce
 /tests/components/lutron/ @cdheiser @wilburCForce
-/homeassistant/components/lutron_caseta/ @swails @bdraco @danaues
-/tests/components/lutron_caseta/ @swails @bdraco @danaues
+/homeassistant/components/lutron_caseta/ @swails @bdraco @danaues @eclair4151
+/tests/components/lutron_caseta/ @swails @bdraco @danaues @eclair4151
 /homeassistant/components/lyric/ @timmo001
 /tests/components/lyric/ @timmo001
 /homeassistant/components/mastodon/ @fabaff
@@ -801,6 +803,8 @@ build.json @home-assistant/supervisor
 /tests/components/meteoclimatic/ @adrianmo
 /homeassistant/components/metoffice/ @MrHarcombe @avee87
 /tests/components/metoffice/ @MrHarcombe @avee87
+/homeassistant/components/microbees/ @microBeesTech
+/tests/components/microbees/ @microBeesTech
 /homeassistant/components/mikrotik/ @engrbm87
 /tests/components/mikrotik/ @engrbm87
 /homeassistant/components/mill/ @danielhiversen
@@ -848,8 +852,8 @@ build.json @home-assistant/supervisor
 /tests/components/mysensors/ @MartinHjelmare @functionpointer
 /homeassistant/components/mystrom/ @fabaff
 /tests/components/mystrom/ @fabaff
-/homeassistant/components/myuplink/ @pajzo
-/tests/components/myuplink/ @pajzo
+/homeassistant/components/myuplink/ @pajzo @astrandb
+/tests/components/myuplink/ @pajzo @astrandb
 /homeassistant/components/nam/ @bieniu
 /tests/components/nam/ @bieniu
 /homeassistant/components/nanoleaf/ @milanmeu
@@ -967,8 +971,8 @@ build.json @home-assistant/supervisor
 /tests/components/otbr/ @home-assistant/core
 /homeassistant/components/ourgroceries/ @OnFreund
 /tests/components/ourgroceries/ @OnFreund
-/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev
-/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev
+/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117
+/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117
 /homeassistant/components/ovo_energy/ @timmo001
 /tests/components/ovo_energy/ @timmo001
 /homeassistant/components/p1_monitor/ @klaasnicolaas
@@ -1125,8 +1129,8 @@ build.json @home-assistant/supervisor
 /tests/components/roku/ @ctalkington
 /homeassistant/components/romy/ @xeniter
 /tests/components/romy/ @xeniter
-/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1
-/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1
+/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1 @Orhideous
+/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1 @Orhideous
 /homeassistant/components/roon/ @pavoni
 /tests/components/roon/ @pavoni
 /homeassistant/components/rpi_power/ @shenxn @swetoast
@@ -1452,13 +1456,14 @@ build.json @home-assistant/supervisor
 /tests/components/v2c/ @dgomes
 /homeassistant/components/vacuum/ @home-assistant/core
 /tests/components/vacuum/ @home-assistant/core
-/homeassistant/components/vallox/ @andre-richter @slovdahl @viiru-
-/tests/components/vallox/ @andre-richter @slovdahl @viiru-
+/homeassistant/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
+/tests/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
 /homeassistant/components/valve/ @home-assistant/core
 /tests/components/valve/ @home-assistant/core
 /homeassistant/components/velbus/ @Cereal2nd @brefra
 /tests/components/velbus/ @Cereal2nd @brefra
-/homeassistant/components/velux/ @Julius2342
+/homeassistant/components/velux/ @Julius2342 @DeerMaximum
+/tests/components/velux/ @Julius2342 @DeerMaximum
 /homeassistant/components/venstar/ @garbled1 @jhollowe
 /tests/components/venstar/ @garbled1 @jhollowe
 /homeassistant/components/versasense/ @imstevenxyz
@@ -1504,10 +1509,14 @@ build.json @home-assistant/supervisor
 /tests/components/weather/ @home-assistant/core
 /homeassistant/components/weatherflow/ @natekspencer @jeeftor
 /tests/components/weatherflow/ @natekspencer @jeeftor
+/homeassistant/components/weatherflow_cloud/ @jeeftor
+/tests/components/weatherflow_cloud/ @jeeftor
 /homeassistant/components/weatherkit/ @tjhorner
 /tests/components/weatherkit/ @tjhorner
 /homeassistant/components/webhook/ @home-assistant/core
 /tests/components/webhook/ @home-assistant/core
+/homeassistant/components/webmin/ @autinerd
+/tests/components/webmin/ @autinerd
 /homeassistant/components/webostv/ @thecode
 /tests/components/webostv/ @thecode
 /homeassistant/components/websocket_api/ @home-assistant/core
build.yaml (10 changed lines)

@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.02.0
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.02.0
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.02.0
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.02.0
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.02.0
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.02.1
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.02.1
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.02.1
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.02.1
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.02.1
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
(file header missing from this capture)

@@ -3,9 +3,10 @@ from __future__ import annotations
 
 import asyncio
 import contextlib
-from datetime import datetime, timedelta
+from datetime import timedelta
 import logging
 import logging.handlers
+from operator import itemgetter
 import os
 import platform
 import sys
@@ -13,13 +14,28 @@ import threading
 from time import monotonic
 from typing import TYPE_CHECKING, Any
 
+# Import cryptography early since import openssl is not thread-safe
+# _frozen_importlib._DeadlockError: deadlock detected by _ModuleLock('cryptography.hazmat.backends.openssl.backend')
+import cryptography.hazmat.backends.openssl.backend  # noqa: F401
 import voluptuous as vol
 import yarl
 
 from . import config as conf_util, config_entries, core, loader, requirements
-from .components import http
+
+# Pre-import config and lovelace which have no requirements here to avoid
+# loading them at run time and blocking the event loop. We do this ahead
+# of time so that we do not have to flag frontends deps with `import_executor`
+# as it would create a thundering heard of executor jobs trying to import
+# frontend deps at the same time.
+from .components import (
+    api as api_pre_import,  # noqa: F401
+    config as config_pre_import,  # noqa: F401
+    http,
+    lovelace as lovelace_pre_import,  # noqa: F401
+)
 from .const import (
     FORMAT_DATETIME,
+    KEY_DATA_LOGGING as DATA_LOGGING,
     REQUIRED_NEXT_PYTHON_HA_RELEASE,
     REQUIRED_NEXT_PYTHON_VER,
     SIGNAL_BOOTSTRAP_INTEGRATIONS,
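The new pre-import comment block above is about keeping slow imports out of the running event loop. As a standalone sketch in plain asyncio (not Home Assistant code), this is the effect being avoided: a synchronous, expensive import performed while the loop is running stalls every other task until it finishes, whereas doing the import up front, before the loop is busy, does not.

# Standalone sketch: a heavy module-level import blocks the event loop.
import asyncio
import time


def slow_import() -> None:
    # Stand-in for importing a module with expensive module-level code.
    time.sleep(0.5)


async def heartbeat() -> None:
    while True:
        print("tick", time.monotonic())
        await asyncio.sleep(0.1)


async def main() -> None:
    task = asyncio.create_task(heartbeat())
    await asyncio.sleep(0.3)
    slow_import()  # the heartbeat stalls for 0.5 s while this runs
    await asyncio.sleep(0.3)
    task.cancel()


asyncio.run(main())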
@@ -31,21 +47,25 @@ from .helpers import (
     device_registry,
     entity,
     entity_registry,
+    floor_registry,
     issue_registry,
+    label_registry,
     recorder,
     restore_state,
     template,
+    translation,
 )
 from .helpers.dispatcher import async_dispatcher_send
 from .helpers.typing import ConfigType
 from .setup import (
+    BASE_PLATFORMS,
     DATA_SETUP_STARTED,
     DATA_SETUP_TIME,
     async_notify_setup_error,
     async_set_domains_to_be_loaded,
     async_setup_component,
 )
-from .util import dt as dt_util
+from .util.async_ import create_eager_task
 from .util.logging import async_activate_log_queue_handler
 from .util.package import async_get_user_site, is_virtual_env
 
@@ -57,7 +77,6 @@ _LOGGER = logging.getLogger(__name__)
 ERROR_LOG_FILENAME = "home-assistant.log"
 
 # hass.data key for logging information.
-DATA_LOGGING = "logging"
 DATA_REGISTRIES_LOADED = "bootstrap_registries_loaded"
 
 LOG_SLOW_STARTUP_INTERVAL = 60
@@ -110,6 +129,7 @@ DEFAULT_INTEGRATIONS = {
     #
     # Integrations providing core functionality:
     "application_credentials",
+    "backup",
     "frontend",
     "hardware",
     "logger",
@@ -143,15 +163,22 @@ DEFAULT_INTEGRATIONS_SUPERVISOR = {
     # These integrations are set up if using the Supervisor
     "hassio",
 }
-DEFAULT_INTEGRATIONS_NON_SUPERVISOR = {
-    # These integrations are set up if not using the Supervisor
-    "backup",
-}
 CRITICAL_INTEGRATIONS = {
     # Recovery mode is activated if these integrations fail to set up
     "frontend",
 }
 
+SETUP_ORDER = {
+    # Load logging as soon as possible
+    "logging": LOGGING_INTEGRATIONS,
+    # Setup frontend
+    "frontend": FRONTEND_INTEGRATIONS,
+    # Setup recorder
+    "recorder": RECORDER_INTEGRATIONS,
+    # Start up debuggers. Start these first in case they want to wait.
+    "debugger": DEBUGGER_INTEGRATIONS,
+}
+
 
 async def async_setup_hass(
     runtime_config: RuntimeConfig,
@@ -217,7 +244,7 @@ async def async_setup_hass(
         )
         # Ask integrations to shut down. It's messy but we can't
         # do a clean stop without knowing what is broken
-        with contextlib.suppress(asyncio.TimeoutError):
+        with contextlib.suppress(TimeoutError):
             async with hass.timeout.async_timeout(10):
                 await hass.async_stop()
 
@@ -291,17 +318,20 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
         platform.uname().processor  # pylint: disable=expression-not-assigned
 
     # Load the registries and cache the result of platform.uname().processor
+    translation.async_setup(hass)
     entity.async_setup(hass)
     template.async_setup(hass)
     await asyncio.gather(
-        area_registry.async_load(hass),
-        device_registry.async_load(hass),
-        entity_registry.async_load(hass),
-        issue_registry.async_load(hass),
+        create_eager_task(area_registry.async_load(hass)),
+        create_eager_task(device_registry.async_load(hass)),
+        create_eager_task(entity_registry.async_load(hass)),
+        create_eager_task(floor_registry.async_load(hass)),
+        create_eager_task(issue_registry.async_load(hass)),
+        create_eager_task(label_registry.async_load(hass)),
         hass.async_add_executor_job(_cache_uname_processor),
-        template.async_load_custom_templates(hass),
-        restore_state.async_load(hass),
-        hass.config_entries.async_initialize(),
+        create_eager_task(template.async_load_custom_templates(hass)),
+        create_eager_task(restore_state.async_load(hass)),
+        create_eager_task(hass.config_entries.async_initialize()),
     )
 
 
@@ -324,7 +354,7 @@ async def async_from_config_dict(
     if not all(
         await asyncio.gather(
             *(
-                async_setup_component(hass, domain, config)
+                create_eager_task(async_setup_component(hass, domain, config))
                 for domain in CORE_INTEGRATIONS
            )
        )
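Several calls above are now wrapped in create_eager_task, which this diff imports from .util.async_. The sketch below is only a plausible analogue of such a helper built on Python 3.12's eager task support, not the Home Assistant implementation: an eager task starts executing synchronously up to its first await instead of waiting for the next event-loop iteration, which saves a scheduling round-trip for the many short coroutines started during bootstrap.

# Illustrative analogue only -- not the Home Assistant implementation.
import asyncio
import sys
from collections.abc import Coroutine
from typing import Any, TypeVar

_T = TypeVar("_T")


def create_eager_task(
    coro: Coroutine[Any, Any, _T],
    *,
    name: str | None = None,
    loop: asyncio.AbstractEventLoop | None = None,
) -> asyncio.Task[_T]:
    """Create a task that starts running immediately on Python 3.12+."""
    loop = loop or asyncio.get_running_loop()
    if sys.version_info >= (3, 12):
        # eager_start=True runs the coroutine synchronously up to its
        # first await instead of deferring to the next loop iteration.
        return asyncio.Task(coro, loop=loop, name=name, eager_start=True)
    return loop.create_task(coro, name=name)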
@@ -533,42 +563,73 @@ def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
     # Add domains depending on if the Supervisor is used or not
     if "SUPERVISOR" in os.environ:
         domains.update(DEFAULT_INTEGRATIONS_SUPERVISOR)
-    else:
-        domains.update(DEFAULT_INTEGRATIONS_NON_SUPERVISOR)
 
     return domains
 
 
-async def _async_watch_pending_setups(hass: core.HomeAssistant) -> None:
-    """Periodic log of setups that are pending.
-
-    Pending for longer than LOG_SLOW_STARTUP_INTERVAL.
-    """
-    loop_count = 0
-    setup_started: dict[str, datetime] = hass.data[DATA_SETUP_STARTED]
-    previous_was_empty = True
-    while True:
-        now = dt_util.utcnow()
-        remaining_with_setup_started = {
-            domain: (now - setup_started[domain]).total_seconds()
-            for domain in setup_started
-        }
-        _LOGGER.debug("Integration remaining: %s", remaining_with_setup_started)
-        if remaining_with_setup_started or not previous_was_empty:
-            async_dispatcher_send(
-                hass, SIGNAL_BOOTSTRAP_INTEGRATIONS, remaining_with_setup_started
-            )
-        previous_was_empty = not remaining_with_setup_started
-        await asyncio.sleep(SLOW_STARTUP_CHECK_INTERVAL)
-        loop_count += SLOW_STARTUP_CHECK_INTERVAL
-
-        if loop_count >= LOG_SLOW_STARTUP_INTERVAL and setup_started:
-            _LOGGER.warning(
-                "Waiting on integrations to complete setup: %s",
-                ", ".join(setup_started),
-            )
-            loop_count = 0
-        _LOGGER.debug("Running timeout Zones: %s", hass.timeout.zones)
+class _WatchPendingSetups:
+    """Periodic log and dispatch of setups that are pending."""
+
+    def __init__(
+        self, hass: core.HomeAssistant, setup_started: dict[str, float]
+    ) -> None:
+        """Initialize the WatchPendingSetups class."""
+        self._hass = hass
+        self._setup_started = setup_started
+        self._duration_count = 0
+        self._handle: asyncio.TimerHandle | None = None
+        self._previous_was_empty = True
+        self._loop = hass.loop
+
+    def _async_watch(self) -> None:
+        """Periodic log of setups that are pending."""
+        now = monotonic()
+        self._duration_count += SLOW_STARTUP_CHECK_INTERVAL
+
+        remaining_with_setup_started = {
+            domain: (now - start_time)
+            for domain, start_time in self._setup_started.items()
+        }
+        _LOGGER.debug("Integration remaining: %s", remaining_with_setup_started)
+        self._async_dispatch(remaining_with_setup_started)
+        if (
+            self._setup_started
+            and self._duration_count % LOG_SLOW_STARTUP_INTERVAL == 0
+        ):
+            # We log every LOG_SLOW_STARTUP_INTERVAL until all integrations are done
+            # once we take over LOG_SLOW_STARTUP_INTERVAL (60s) to start up
+            _LOGGER.warning(
+                "Waiting on integrations to complete setup: %s",
+                ", ".join(self._setup_started),
+            )
+        _LOGGER.debug("Running timeout Zones: %s", self._hass.timeout.zones)
+        self._async_schedule_next()
+
+    def _async_dispatch(self, remaining_with_setup_started: dict[str, float]) -> None:
+        """Dispatch the signal."""
+        if remaining_with_setup_started or not self._previous_was_empty:
+            async_dispatcher_send(
+                self._hass, SIGNAL_BOOTSTRAP_INTEGRATIONS, remaining_with_setup_started
+            )
+        self._previous_was_empty = not remaining_with_setup_started
+
+    def _async_schedule_next(self) -> None:
+        """Schedule the next call."""
+        self._handle = self._loop.call_later(
+            SLOW_STARTUP_CHECK_INTERVAL, self._async_watch
+        )
+
+    def async_start(self) -> None:
+        """Start watching."""
+        self._async_schedule_next()
+
+    def async_stop(self) -> None:
+        """Stop watching."""
+        self._async_dispatch({})
+        if self._handle:
+            self._handle.cancel()
+            self._handle = None
 
 
 async def async_setup_multi_components(
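The class above replaces a `while True: await asyncio.sleep(...)` coroutine with a callback that reschedules itself via `loop.call_later`. A minimal standalone version of that scheduling pattern, with illustrative names rather than Home Assistant APIs:

# Standalone sketch of the call_later self-rescheduling pattern.
import asyncio


class PeriodicWatcher:
    def __init__(self, interval: float) -> None:
        self._interval = interval
        self._handle: asyncio.TimerHandle | None = None

    def _tick(self) -> None:
        print("still waiting...")
        self._schedule()  # reschedule instead of looping in a coroutine

    def _schedule(self) -> None:
        self._handle = asyncio.get_running_loop().call_later(
            self._interval, self._tick
        )

    def start(self) -> None:
        self._schedule()

    def stop(self) -> None:
        if self._handle:
            self._handle.cancel()
            self._handle = None


async def main() -> None:
    watcher = PeriodicWatcher(0.2)
    watcher.start()
    await asyncio.sleep(1)
    watcher.stop()


asyncio.run(main())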
@ -581,7 +642,9 @@ async def async_setup_multi_components(
|
|||||||
domains_not_yet_setup = domains - hass.config.components
|
domains_not_yet_setup = domains - hass.config.components
|
||||||
futures = {
|
futures = {
|
||||||
domain: hass.async_create_task(
|
domain: hass.async_create_task(
|
||||||
async_setup_component(hass, domain, config), f"setup component {domain}"
|
async_setup_component(hass, domain, config),
|
||||||
|
f"setup component {domain}",
|
||||||
|
eager_start=True,
|
||||||
)
|
)
|
||||||
for domain in domains_not_yet_setup
|
for domain in domains_not_yet_setup
|
||||||
}
|
}
|
||||||
@ -596,17 +659,12 @@ async def async_setup_multi_components(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
async def _async_set_up_integrations(
|
async def _async_resolve_domains_to_setup(
|
||||||
hass: core.HomeAssistant, config: dict[str, Any]
|
hass: core.HomeAssistant, config: dict[str, Any]
|
||||||
) -> None:
|
) -> tuple[set[str], dict[str, loader.Integration]]:
|
||||||
"""Set up all the integrations."""
|
"""Resolve all dependencies and return list of domains to set up."""
|
||||||
hass.data[DATA_SETUP_STARTED] = {}
|
base_platforms_loaded = False
|
||||||
setup_time: dict[str, timedelta] = hass.data.setdefault(DATA_SETUP_TIME, {})
|
|
||||||
|
|
||||||
watch_task = asyncio.create_task(_async_watch_pending_setups(hass))
|
|
||||||
|
|
||||||
domains_to_setup = _get_domains(hass, config)
|
domains_to_setup = _get_domains(hass, config)
|
||||||
|
|
||||||
needed_requirements: set[str] = set()
|
needed_requirements: set[str] = set()
|
||||||
|
|
||||||
# Resolve all dependencies so we know all integrations
|
# Resolve all dependencies so we know all integrations
|
||||||
@ -617,48 +675,58 @@ async def _async_set_up_integrations(
|
|||||||
old_to_resolve: set[str] = to_resolve
|
old_to_resolve: set[str] = to_resolve
|
||||||
to_resolve = set()
|
to_resolve = set()
|
||||||
|
|
||||||
integrations_to_process = [
|
if not base_platforms_loaded:
|
||||||
int_or_exc
|
# Load base platforms right away since
|
||||||
for int_or_exc in (
|
# we do not require the manifest to list
|
||||||
await loader.async_get_integrations(hass, old_to_resolve)
|
# them as dependencies and we want
|
||||||
).values()
|
# to avoid the lock contention when multiple
|
||||||
if isinstance(int_or_exc, loader.Integration)
|
# integrations try to resolve them at once
|
||||||
]
|
base_platforms_loaded = True
|
||||||
|
to_get = {*old_to_resolve, *BASE_PLATFORMS}
|
||||||
|
else:
|
||||||
|
to_get = old_to_resolve
|
||||||
|
|
||||||
manifest_deps: set[str] = set()
|
manifest_deps: set[str] = set()
|
||||||
for itg in integrations_to_process:
|
resolve_dependencies_tasks: list[asyncio.Task[bool]] = []
|
||||||
|
integrations_to_process: list[loader.Integration] = []
|
||||||
|
|
||||||
|
for domain, itg in (await loader.async_get_integrations(hass, to_get)).items():
|
||||||
|
if not isinstance(itg, loader.Integration) or domain not in old_to_resolve:
|
||||||
|
continue
|
||||||
|
integrations_to_process.append(itg)
|
||||||
|
integration_cache[domain] = itg
|
||||||
manifest_deps.update(itg.dependencies)
|
manifest_deps.update(itg.dependencies)
|
||||||
manifest_deps.update(itg.after_dependencies)
|
manifest_deps.update(itg.after_dependencies)
|
||||||
needed_requirements.update(itg.requirements)
|
needed_requirements.update(itg.requirements)
|
||||||
|
if not itg.all_dependencies_resolved:
|
||||||
|
resolve_dependencies_tasks.append(
|
||||||
|
create_eager_task(
|
||||||
|
itg.resolve_dependencies(),
|
||||||
|
name=f"resolve dependencies {domain}",
|
||||||
|
loop=hass.loop,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
if manifest_deps:
|
if unseen_deps := manifest_deps - integration_cache.keys():
|
||||||
# If there are dependencies, try to preload all
|
# If there are dependencies, try to preload all
|
||||||
# the integrations manifest at once and add them
|
# the integrations manifest at once and add them
|
||||||
# to the list of requirements we need to install
|
# to the list of requirements we need to install
|
||||||
# so we can try to check if they are already installed
|
# so we can try to check if they are already installed
|
||||||
# in a single call below which avoids each integration
|
# in a single call below which avoids each integration
|
||||||
# having to wait for the lock to do it individually
|
# having to wait for the lock to do it individually
|
||||||
deps = await loader.async_get_integrations(hass, manifest_deps)
|
deps = await loader.async_get_integrations(hass, unseen_deps)
|
||||||
for dependant_itg in deps.values():
|
for dependant_domain, dependant_itg in deps.items():
|
||||||
if isinstance(dependant_itg, loader.Integration):
|
if isinstance(dependant_itg, loader.Integration):
|
||||||
|
integration_cache[dependant_domain] = dependant_itg
|
||||||
needed_requirements.update(dependant_itg.requirements)
|
needed_requirements.update(dependant_itg.requirements)
|
||||||
|
|
||||||
resolve_dependencies_tasks = [
|
|
||||||
itg.resolve_dependencies()
|
|
||||||
for itg in integrations_to_process
|
|
||||||
if not itg.all_dependencies_resolved
|
|
||||||
]
|
|
||||||
|
|
||||||
if resolve_dependencies_tasks:
|
if resolve_dependencies_tasks:
|
||||||
await asyncio.gather(*resolve_dependencies_tasks)
|
await asyncio.gather(*resolve_dependencies_tasks)
|
||||||
|
|
||||||
for itg in integrations_to_process:
|
for itg in integrations_to_process:
|
||||||
integration_cache[itg.domain] = itg
|
|
||||||
|
|
||||||
for dep in itg.all_dependencies:
|
for dep in itg.all_dependencies:
|
||||||
if dep in domains_to_setup:
|
if dep in domains_to_setup:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
domains_to_setup.add(dep)
|
domains_to_setup.add(dep)
|
||||||
to_resolve.add(dep)
|
to_resolve.add(dep)
|
||||||
|
|
||||||
@ -670,31 +738,50 @@ async def _async_set_up_integrations(
|
|||||||
hass.async_create_background_task(
|
hass.async_create_background_task(
|
         requirements.async_load_installed_versions(hass, needed_requirements),
         "check installed requirements",
+        eager_start=True,
+    )
+    # Start loading translations for all integrations we are going to set up
+    # in the background so they are ready when we need them. This avoids a
+    # lot of waiting for the translation load lock and a thundering herd of
+    # tasks trying to load the same translations at the same time as each
+    # integration is loaded.
+    #
+    # We do not wait for this since as soon as the task runs it will
+    # hold the translation load lock and if anything is fast enough to
+    # wait for the translation load lock, loading will be done by the
+    # time it gets to it.
+    hass.async_create_background_task(
+        translation.async_load_integrations(hass, {*BASE_PLATFORMS, *domains_to_setup}),
+        "load translations",
+        eager_start=True,
+    )
+
+    return domains_to_setup, integration_cache
+
+
+async def _async_set_up_integrations(
+    hass: core.HomeAssistant, config: dict[str, Any]
+) -> None:
+    """Set up all the integrations."""
+    setup_started: dict[str, float] = {}
+    hass.data[DATA_SETUP_STARTED] = setup_started
+    setup_time: dict[str, timedelta] = hass.data.setdefault(DATA_SETUP_TIME, {})
+
+    watcher = _WatchPendingSetups(hass, setup_started)
+    watcher.async_start()
+
+    domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
+        hass, config
     )

     # Initialize recorder
     if "recorder" in domains_to_setup:
         recorder.async_initialize_recorder(hass)

-    # Load logging as soon as possible
-    if logging_domains := domains_to_setup & LOGGING_INTEGRATIONS:
-        _LOGGER.info("Setting up logging: %s", logging_domains)
-        await async_setup_multi_components(hass, logging_domains, config)
+    pre_stage_domains: dict[str, set[str]] = {
+        name: domains_to_setup & domain_group
+        for name, domain_group in SETUP_ORDER.items()
+    }

-    # Setup frontend
-    if frontend_domains := domains_to_setup & FRONTEND_INTEGRATIONS:
-        _LOGGER.info("Setting up frontend: %s", frontend_domains)
-        await async_setup_multi_components(hass, frontend_domains, config)
-
-    # Setup recorder
-    if recorder_domains := domains_to_setup & RECORDER_INTEGRATIONS:
-        _LOGGER.info("Setting up recorder: %s", recorder_domains)
-        await async_setup_multi_components(hass, recorder_domains, config)
-
-    # Start up debuggers. Start these first in case they want to wait.
-    if debuggers := domains_to_setup & DEBUGGER_INTEGRATIONS:
-        _LOGGER.debug("Setting up debuggers: %s", debuggers)
-        await async_setup_multi_components(hass, debuggers, config)
-
     # calculate what components to setup in what stage
     stage_1_domains: set[str] = set()
@@ -718,14 +805,13 @@ async def _async_set_up_integrations(

                 deps_promotion.update(dep_itg.all_dependencies)

-    stage_2_domains = (
-        domains_to_setup
-        - logging_domains
-        - frontend_domains
-        - recorder_domains
-        - debuggers
-        - stage_1_domains
-    )
+    stage_2_domains = domains_to_setup - stage_1_domains
+
+    for name, domain_group in pre_stage_domains.items():
+        if domain_group:
+            stage_2_domains -= domain_group
+            _LOGGER.info("Setting up %s: %s", name, domain_group)
+            await async_setup_multi_components(hass, domain_group, config)

     # Enables after dependencies when setting up stage 1 domains
     async_set_domains_to_be_loaded(hass, stage_1_domains)
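The two hunks above replace four nearly identical logging/frontend/recorder/debugger blocks with a single SETUP_ORDER-driven mapping that is intersected with the resolved domains and then drained before stage 2. A minimal, self-contained sketch of that consolidation pattern; the integration sets and the print stand-in for async_setup_multi_components are illustrative placeholders, not Home Assistant's real constants:

# Sketch of the SETUP_ORDER-driven pre-stage setup used in the hunks above.
# The sets below are made-up examples, not the real frozensets from bootstrap.
SETUP_ORDER: dict[str, set[str]] = {
    "logging": {"logger", "system_log"},
    "frontend": {"frontend"},
    "recorder": {"recorder"},
    "debugger": {"debugpy"},
}

domains_to_setup = {"logger", "frontend", "recorder", "mqtt", "zha"}
stage_1_domains = {"frontend"}

# One comprehension replaces four hand-rolled "domains_to_setup & GROUP" blocks.
pre_stage_domains = {
    name: domains_to_setup & domain_group
    for name, domain_group in SETUP_ORDER.items()
}

stage_2_domains = domains_to_setup - stage_1_domains
for name, domain_group in pre_stage_domains.items():
    if domain_group:
        stage_2_domains -= domain_group
        print(f"Setting up {name}: {domain_group}")  # stands in for the real setup call

print(f"Remaining for stage 2: {stage_2_domains}")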
@@ -738,7 +824,7 @@ async def _async_set_up_integrations(
             STAGE_1_TIMEOUT, cool_down=COOLDOWN_TIME
         ):
             await async_setup_multi_components(hass, stage_1_domains, config)
-    except asyncio.TimeoutError:
+    except TimeoutError:
         _LOGGER.warning("Setup timed out for stage 1 - moving forward")

     # Add after dependencies when setting up stage 2 domains
@@ -751,7 +837,7 @@ async def _async_set_up_integrations(
             STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME
         ):
             await async_setup_multi_components(hass, stage_2_domains, config)
-    except asyncio.TimeoutError:
+    except TimeoutError:
         _LOGGER.warning("Setup timed out for stage 2 - moving forward")

     # Wrap up startup
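The asyncio.TimeoutError to TimeoutError swaps in these hunks lean on the fact that, since Python 3.11, asyncio.TimeoutError is an alias of the builtin TimeoutError, so catching the builtin also catches timeouts raised by asyncio.timeout() and the Home Assistant timeout helpers. A quick check, runnable on Python 3.11 or newer:

import asyncio

# On 3.11+ the two names refer to the same class, so `except TimeoutError:`
# below behaves exactly like the old `except asyncio.TimeoutError:`.
assert asyncio.TimeoutError is TimeoutError


async def main() -> None:
    try:
        async with asyncio.timeout(0.01):
            await asyncio.sleep(1)
    except TimeoutError:
        print("timed out")


asyncio.run(main())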
@@ -759,18 +845,12 @@ async def _async_set_up_integrations(
     try:
         async with hass.timeout.async_timeout(WRAP_UP_TIMEOUT, cool_down=COOLDOWN_TIME):
             await hass.async_block_till_done()
-    except asyncio.TimeoutError:
+    except TimeoutError:
         _LOGGER.warning("Setup timed out for bootstrap - moving forward")

-    watch_task.cancel()
-    async_dispatcher_send(hass, SIGNAL_BOOTSTRAP_INTEGRATIONS, {})
+    watcher.async_stop()

     _LOGGER.debug(
         "Integration setup times: %s",
-        {
-            integration: timedelta.total_seconds()
-            for integration, timedelta in sorted(
-                setup_time.items(), key=lambda item: item[1].total_seconds()
-            )
-        },
+        dict(sorted(setup_time.items(), key=itemgetter(1))),
     )
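The rewritten debug line sorts the per-integration setup times with operator.itemgetter(1) instead of a lambda over total_seconds(); timedelta objects order correctly on their own, so the shorter expression is equivalent. A small illustration with made-up timings:

from datetime import timedelta
from operator import itemgetter

# Hypothetical per-domain setup durations.
setup_time = {
    "zha": timedelta(seconds=3.2),
    "frontend": timedelta(seconds=0.4),
    "recorder": timedelta(seconds=1.1),
}

# timedelta supports comparison, so itemgetter(1) sorts by duration directly.
print(dict(sorted(setup_time.items(), key=itemgetter(1))))
# -> frontend, recorder, zha (shortest to longest)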
@@ -1,7 +1,6 @@
 """Adds config flow for AccuWeather."""
 from __future__ import annotations

-import asyncio
 from asyncio import timeout
 from typing import Any

@@ -61,7 +60,7 @@ class AccuWeatherFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
                     longitude=user_input[CONF_LONGITUDE],
                 )
                 await accuweather.async_get_location()
-            except (ApiError, ClientConnectorError, asyncio.TimeoutError, ClientError):
+            except (ApiError, ClientConnectorError, TimeoutError, ClientError):
                 errors["base"] = "cannot_connect"
             except InvalidApiKeyError:
                 errors[CONF_API_KEY] = "invalid_api_key"
@@ -1,7 +1,6 @@
 """Config flow for Rollease Acmeda Automate Pulse Hub."""
 from __future__ import annotations

-import asyncio
 from asyncio import timeout
 from contextlib import suppress
 from typing import Any
@@ -42,7 +41,7 @@ class AcmedaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
         }

         hubs: list[aiopulse.Hub] = []
-        with suppress(asyncio.TimeoutError):
+        with suppress(TimeoutError):
             async with timeout(5):
                 async for hub in aiopulse.Hub.discover():
                     if hub.id not in already_configured:
new file: homeassistant/components/acomax/__init__.py
@@ -0,0 +1 @@
+"""Virtual integration: Acomax."""

new file: homeassistant/components/acomax/manifest.json
@@ -0,0 +1,6 @@
+{
+  "domain": "acomax",
+  "name": "Acomax",
+  "integration_type": "virtual",
+  "supported_by": "motion_blinds"
+}
@@ -303,7 +303,7 @@ class AdsEntity(Entity):
         try:
             async with timeout(10):
                 await self._event.wait()
-        except asyncio.TimeoutError:
+        except TimeoutError:
             _LOGGER.debug("Variable %s: Timeout during first update", ads_var)

     @property
@@ -17,7 +17,8 @@ from homeassistant.components.climate import (
 )
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, UnitOfTemperature
-from homeassistant.core import HomeAssistant
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.exceptions import ServiceValidationError
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

 from .const import (
@@ -49,6 +50,24 @@ ADVANTAGE_AIR_HEAT_TARGET = "myAutoHeatTargetTemp"
 ADVANTAGE_AIR_COOL_TARGET = "myAutoCoolTargetTemp"
 ADVANTAGE_AIR_MYFAN = "autoAA"

+HVAC_MODES = [
+    HVACMode.OFF,
+    HVACMode.COOL,
+    HVACMode.HEAT,
+    HVACMode.FAN_ONLY,
+    HVACMode.DRY,
+]
+HVAC_MODES_MYAUTO = HVAC_MODES + [HVACMode.HEAT_COOL]
+SUPPORTED_FEATURES = (
+    ClimateEntityFeature.FAN_MODE
+    | ClimateEntityFeature.TURN_OFF
+    | ClimateEntityFeature.TURN_ON
+)
+SUPPORTED_FEATURES_MYZONE = SUPPORTED_FEATURES | ClimateEntityFeature.TARGET_TEMPERATURE
+SUPPORTED_FEATURES_MYAUTO = (
+    SUPPORTED_FEATURES | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
+)
+
 PARALLEL_UPDATES = 0

 _LOGGER = logging.getLogger(__name__)
@@ -84,34 +103,56 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
     _attr_min_temp = 16
     _attr_name = None
     _enable_turn_on_off_backwards_compatibility = False
+    _support_preset = ClimateEntityFeature(0)

     def __init__(self, instance: AdvantageAirData, ac_key: str) -> None:
         """Initialize an AdvantageAir AC unit."""
         super().__init__(instance, ac_key)

-        self._attr_supported_features = (
-            ClimateEntityFeature.FAN_MODE
-            | ClimateEntityFeature.TURN_OFF
-            | ClimateEntityFeature.TURN_ON
-        )
-        self._attr_hvac_modes = [
-            HVACMode.OFF,
-            HVACMode.COOL,
-            HVACMode.HEAT,
-            HVACMode.FAN_ONLY,
-            HVACMode.DRY,
-        ]
-        # Set supported features and HVAC modes based on current operating mode
+        self._attr_preset_modes = [ADVANTAGE_AIR_MYZONE]
+
+        # Add "MyTemp" preset if available
+        if ADVANTAGE_AIR_MYTEMP_ENABLED in self._ac:
+            self._attr_preset_modes += [ADVANTAGE_AIR_MYTEMP]
+            self._support_preset = ClimateEntityFeature.PRESET_MODE
+
+        # Add "MyAuto" preset if available
+        if ADVANTAGE_AIR_MYAUTO_ENABLED in self._ac:
+            self._attr_preset_modes += [ADVANTAGE_AIR_MYAUTO]
+            self._support_preset = ClimateEntityFeature.PRESET_MODE
+
+        # Setup attributes based on current preset
+        self._async_configure_preset()
+
+    def _async_configure_preset(self) -> None:
+        """Configure attributes based on preset."""
+
+        # Preset Changes
         if self._ac.get(ADVANTAGE_AIR_MYAUTO_ENABLED):
             # MyAuto
-            self._attr_supported_features |= (
-                ClimateEntityFeature.TARGET_TEMPERATURE
-                | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
+            self._attr_preset_mode = ADVANTAGE_AIR_MYAUTO
+            self._attr_hvac_modes = HVAC_MODES_MYAUTO
+            self._attr_supported_features = (
+                SUPPORTED_FEATURES_MYAUTO | self._support_preset
             )
-            self._attr_hvac_modes += [HVACMode.HEAT_COOL]
-        elif not self._ac.get(ADVANTAGE_AIR_MYTEMP_ENABLED):
+        elif self._ac.get(ADVANTAGE_AIR_MYTEMP_ENABLED):
+            # MyTemp
+            self._attr_preset_mode = ADVANTAGE_AIR_MYTEMP
+            self._attr_hvac_modes = HVAC_MODES
+            self._attr_supported_features = SUPPORTED_FEATURES | self._support_preset
+        else:
             # MyZone
-            self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE
+            self._attr_preset_mode = ADVANTAGE_AIR_MYZONE
+            self._attr_hvac_modes = HVAC_MODES
+            self._attr_supported_features = (
+                SUPPORTED_FEATURES_MYZONE | self._support_preset
+            )
+
+    @callback
+    def _handle_coordinator_update(self) -> None:
+        """Handle updated data from the coordinator."""
+        self._async_configure_preset()
+        super()._handle_coordinator_update()

     @property
     def current_temperature(self) -> float | None:
@@ -124,11 +165,7 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
     def target_temperature(self) -> float | None:
         """Return the current target temperature."""
         # If the system is in MyZone mode, and a zone is set, return that temperature instead.
-        if (
-            self._myzone
-            and not self._ac.get(ADVANTAGE_AIR_MYAUTO_ENABLED)
-            and not self._ac.get(ADVANTAGE_AIR_MYTEMP_ENABLED)
-        ):
+        if self._myzone and self.preset_mode == ADVANTAGE_AIR_MYZONE:
             return self._myzone["setTemp"]
         return self._ac["setTemp"]

@@ -169,14 +206,15 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
     async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
         """Set the HVAC Mode and State."""
         if hvac_mode == HVACMode.OFF:
-            await self.async_update_ac({"state": ADVANTAGE_AIR_STATE_OFF})
-        else:
-            await self.async_update_ac(
-                {
-                    "state": ADVANTAGE_AIR_STATE_ON,
-                    "mode": HASS_HVAC_MODES.get(hvac_mode),
-                }
-            )
+            return await self.async_turn_off()
+        if hvac_mode == HVACMode.HEAT_COOL and self.preset_mode != ADVANTAGE_AIR_MYAUTO:
+            raise ServiceValidationError("Heat/Cool is not supported in this mode")
+        await self.async_update_ac(
+            {
+                "state": ADVANTAGE_AIR_STATE_ON,
+                "mode": HASS_HVAC_MODES.get(hvac_mode),
+            }
+        )

     async def async_set_fan_mode(self, fan_mode: str) -> None:
         """Set the Fan Mode."""
@@ -198,6 +236,16 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
             }
         )

+    async def async_set_preset_mode(self, preset_mode: str) -> None:
+        """Set the preset mode."""
+        change = {}
+        if ADVANTAGE_AIR_MYTEMP_ENABLED in self._ac:
+            change[ADVANTAGE_AIR_MYTEMP_ENABLED] = preset_mode == ADVANTAGE_AIR_MYTEMP
+        if ADVANTAGE_AIR_MYAUTO_ENABLED in self._ac:
+            change[ADVANTAGE_AIR_MYAUTO_ENABLED] = preset_mode == ADVANTAGE_AIR_MYAUTO
+        if change:
+            await self.async_update_ac(change)
+

 class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
     """AdvantageAir MyTemp Zone control."""
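The climate hunks above move the MyZone/MyTemp/MyAuto bookkeeping into _async_configure_preset() and re-run it from _handle_coordinator_update() before handing control back to the base class, so a data refresh can change the entity's modes and supported features on the fly. A generic sketch of that coordinator-entity pattern; the class, attribute, and key names here are illustrative, not the integration's real ones:

from homeassistant.core import callback
from homeassistant.helpers.update_coordinator import CoordinatorEntity


class ExampleModalEntity(CoordinatorEntity):
    """Entity whose capabilities are derived from the latest coordinator data."""

    def __init__(self, coordinator) -> None:
        super().__init__(coordinator)
        self._configure_from_data()

    @callback
    def _configure_from_data(self) -> None:
        # Recompute derived attributes (modes, features, presets, ...) from
        # coordinator.data, the same role _async_configure_preset() plays above.
        self._attr_extra_state_attributes = {
            "mode": self.coordinator.data.get("mode")
        }

    @callback
    def _handle_coordinator_update(self) -> None:
        """Reconfigure first, then let the base class write the new state."""
        self._configure_from_data()
        super()._handle_coordinator_update()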
@@ -31,7 +31,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     longitude = entry.data[CONF_LONGITUDE]
     station_updates = entry.options.get(CONF_STATION_UPDATES, True)

-    options = ConnectionOptions(api_key, station_updates, True)
+    options = ConnectionOptions(api_key, station_updates)
     aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options)
     try:
         await aemet.select_coordinates(latitude, longitude)
@@ -21,7 +21,7 @@ from .const import CONF_STATION_UPDATES, DEFAULT_NAME, DOMAIN

 OPTIONS_SCHEMA = vol.Schema(
     {
-        vol.Required(CONF_STATION_UPDATES): bool,
+        vol.Required(CONF_STATION_UPDATES, default=True): bool,
     }
 )
 OPTIONS_FLOW = {
@@ -45,7 +45,7 @@ class AemetConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             await self.async_set_unique_id(f"{latitude}-{longitude}")
             self._abort_if_unique_id_configured()

-            options = ConnectionOptions(user_input[CONF_API_KEY], False, True)
+            options = ConnectionOptions(user_input[CONF_API_KEY], False)
             aemet = AEMET(aiohttp_client.async_get_clientsession(self.hass), options)
             try:
                 await aemet.select_coordinates(latitude, longitude)
@@ -20,7 +20,7 @@ from aemet_opendata.const import (
     AOD_TEMP,
     AOD_TEMP_MAX,
     AOD_TEMP_MIN,
-    AOD_TIMESTAMP,
+    AOD_TIMESTAMP_UTC,
     AOD_WIND_DIRECTION,
     AOD_WIND_SPEED,
     AOD_WIND_SPEED_MAX,
@@ -105,7 +105,7 @@ FORECAST_MAP = {
         AOD_PRECIPITATION_PROBABILITY: ATTR_FORECAST_PRECIPITATION_PROBABILITY,
         AOD_TEMP_MAX: ATTR_FORECAST_NATIVE_TEMP,
         AOD_TEMP_MIN: ATTR_FORECAST_NATIVE_TEMP_LOW,
-        AOD_TIMESTAMP: ATTR_FORECAST_TIME,
+        AOD_TIMESTAMP_UTC: ATTR_FORECAST_TIME,
         AOD_WIND_DIRECTION: ATTR_FORECAST_WIND_BEARING,
         AOD_WIND_SPEED: ATTR_FORECAST_NATIVE_WIND_SPEED,
     },
@@ -114,7 +114,7 @@ FORECAST_MAP = {
         AOD_PRECIPITATION_PROBABILITY: ATTR_FORECAST_PRECIPITATION_PROBABILITY,
         AOD_PRECIPITATION: ATTR_FORECAST_NATIVE_PRECIPITATION,
         AOD_TEMP: ATTR_FORECAST_NATIVE_TEMP,
-        AOD_TIMESTAMP: ATTR_FORECAST_TIME,
+        AOD_TIMESTAMP_UTC: ATTR_FORECAST_TIME,
         AOD_WIND_DIRECTION: ATTR_FORECAST_WIND_BEARING,
         AOD_WIND_SPEED_MAX: ATTR_FORECAST_NATIVE_WIND_GUST_SPEED,
         AOD_WIND_SPEED: ATTR_FORECAST_NATIVE_WIND_SPEED,
new file: homeassistant/components/aemet/diagnostics.py
@@ -0,0 +1,44 @@
+"""Support for the AEMET OpenData diagnostics."""
+from __future__ import annotations
+
+from typing import Any
+
+from aemet_opendata.const import AOD_COORDS
+
+from homeassistant.components.diagnostics.util import async_redact_data
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import (
+    CONF_API_KEY,
+    CONF_LATITUDE,
+    CONF_LONGITUDE,
+    CONF_UNIQUE_ID,
+)
+from homeassistant.core import HomeAssistant
+
+from .const import DOMAIN, ENTRY_WEATHER_COORDINATOR
+from .coordinator import WeatherUpdateCoordinator
+
+TO_REDACT_CONFIG = [
+    CONF_API_KEY,
+    CONF_LATITUDE,
+    CONF_LONGITUDE,
+    CONF_UNIQUE_ID,
+]
+
+TO_REDACT_COORD = [
+    AOD_COORDS,
+]
+
+
+async def async_get_config_entry_diagnostics(
+    hass: HomeAssistant, config_entry: ConfigEntry
+) -> dict[str, Any]:
+    """Return diagnostics for a config entry."""
+    aemet_entry = hass.data[DOMAIN][config_entry.entry_id]
+    coordinator: WeatherUpdateCoordinator = aemet_entry[ENTRY_WEATHER_COORDINATOR]
+
+    return {
+        "api_data": coordinator.aemet.raw_data(),
+        "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT_CONFIG),
+        "coord_data": async_redact_data(coordinator.data, TO_REDACT_COORD),
+    }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aemet",
   "iot_class": "cloud_polling",
   "loggers": ["aemet_opendata"],
-  "requirements": ["AEMET-OpenData==0.4.7"]
+  "requirements": ["AEMET-OpenData==0.5.1"]
 }
@@ -27,7 +27,7 @@ from aemet_opendata.const import (
     AOD_TEMP,
     AOD_TEMP_MAX,
     AOD_TEMP_MIN,
-    AOD_TIMESTAMP,
+    AOD_TIMESTAMP_UTC,
     AOD_TOWN,
     AOD_WEATHER,
     AOD_WIND_DIRECTION,
@@ -171,7 +171,7 @@ FORECAST_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
     ),
     AemetSensorEntityDescription(
         key=f"forecast-daily-{ATTR_API_FORECAST_TIME}",
-        keys=[AOD_TOWN, AOD_FORECAST_DAILY, AOD_FORECAST_CURRENT, AOD_TIMESTAMP],
+        keys=[AOD_TOWN, AOD_FORECAST_DAILY, AOD_FORECAST_CURRENT, AOD_TIMESTAMP_UTC],
         name="Daily forecast time",
         device_class=SensorDeviceClass.TIMESTAMP,
         value_fn=dt_util.parse_datetime,
@@ -179,7 +179,7 @@ FORECAST_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
     AemetSensorEntityDescription(
         entity_registry_enabled_default=False,
         key=f"forecast-hourly-{ATTR_API_FORECAST_TIME}",
-        keys=[AOD_TOWN, AOD_FORECAST_HOURLY, AOD_FORECAST_CURRENT, AOD_TIMESTAMP],
+        keys=[AOD_TOWN, AOD_FORECAST_HOURLY, AOD_FORECAST_CURRENT, AOD_TIMESTAMP_UTC],
         name="Hourly forecast time",
         device_class=SensorDeviceClass.TIMESTAMP,
         value_fn=dt_util.parse_datetime,
@@ -286,7 +286,7 @@ WEATHER_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
     ),
     AemetSensorEntityDescription(
         key=ATTR_API_STATION_TIMESTAMP,
-        keys=[AOD_STATION, AOD_TIMESTAMP],
+        keys=[AOD_STATION, AOD_TIMESTAMP_UTC],
         name="Station timestamp",
         device_class=SensorDeviceClass.TIMESTAMP,
         value_fn=dt_util.parse_datetime,
@@ -326,7 +326,7 @@ WEATHER_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
     ),
     AemetSensorEntityDescription(
         key=ATTR_API_TOWN_TIMESTAMP,
-        keys=[AOD_TOWN, AOD_FORECAST_HOURLY, AOD_TIMESTAMP],
+        keys=[AOD_TOWN, AOD_FORECAST_HOURLY, AOD_TIMESTAMP_UTC],
         name="Town timestamp",
         device_class=SensorDeviceClass.TIMESTAMP,
         value_fn=dt_util.parse_datetime,
@@ -22,8 +22,6 @@ CONF_TRACKING_NUMBER: Final = "tracking_number"
 DEFAULT_NAME: Final = "aftership"
 UPDATE_TOPIC: Final = f"{DOMAIN}_update"

-ICON: Final = "mdi:package-variant-closed"
-
 MIN_TIME_BETWEEN_UPDATES: Final = timedelta(minutes=15)

 SERVICE_ADD_TRACKING: Final = "add_tracking"
new file: homeassistant/components/aftership/icons.json
@@ -0,0 +1,13 @@
+{
+  "entity": {
+    "sensor": {
+      "packages": {
+        "default": "mdi:package-variant-closed"
+      }
+    }
+  },
+  "services": {
+    "add_tracking": "mdi:package-variant-plus",
+    "remove_tracking": "mdi:package-variant-minus"
+  }
+}
@@ -35,7 +35,6 @@ from .const import (
     CONF_TRACKING_NUMBER,
     DEFAULT_NAME,
     DOMAIN,
-    ICON,
     MIN_TIME_BETWEEN_UPDATES,
     REMOVE_TRACKING_SERVICE_SCHEMA,
     SERVICE_ADD_TRACKING,
@@ -135,7 +134,7 @@ class AfterShipSensor(SensorEntity):

     _attr_attribution = ATTRIBUTION
     _attr_native_unit_of_measurement: str = "packages"
-    _attr_icon: str = ICON
+    _attr_translation_key = "packages"

     def __init__(self, aftership: AfterShip, name: str) -> None:
         """Initialize the sensor."""
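The AfterShip hunks above, like the AlarmDecoder, Amber Electric and Android IP Webcam ones further down, drop hard-coded icon values in favour of a translation_key that is resolved through the component's new icons.json. A minimal sketch of the pairing, with a hypothetical "packages" sensor:

from homeassistant.components.sensor import SensorEntity


class ExamplePackagesSensor(SensorEntity):
    """Sensor whose icon comes from icons.json rather than _attr_icon."""

    # Replaces something like `_attr_icon = "mdi:package-variant-closed"`.
    _attr_translation_key = "packages"


# The matching icons.json entry (as added for AfterShip above) is simply:
#   {"entity": {"sensor": {"packages": {"default": "mdi:package-variant-closed"}}}}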
@@ -77,9 +77,8 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         new_data = entry.data.copy()
         del new_data[CONF_RADIUS]

-        entry.version = 2
         hass.config_entries.async_update_entry(
-            entry, data=new_data, options=new_options
+            entry, data=new_data, options=new_options, version=2
         )

     _LOGGER.info("Migration to version %s successful", entry.version)
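This hunk, and the AirVisual and Ambient-style version bumps further down, stop assigning entry.version directly and instead pass version= to hass.config_entries.async_update_entry(), so the version change is stored together with the new data. A hedged sketch of a migration written in that style; the "radius" key is a made-up example, not this integration's real option:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Illustrative migration using async_update_entry(..., version=...)."""
    if entry.version == 1:
        new_data = {**entry.data}
        new_data.pop("radius", None)  # hypothetical obsolete key
        # The version travels with the data update instead of being mutated in place.
        hass.config_entries.async_update_entry(entry, data=new_data, version=2)
    return True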
@@ -23,6 +23,13 @@ from .const import DOMAIN, MFCT_ID

 _LOGGER = logging.getLogger(__name__)

+SERVICE_UUIDS = [
+    "b42e1f6e-ade7-11e4-89d3-123b93f75cba",
+    "b42e4a8e-ade7-11e4-89d3-123b93f75cba",
+    "b42e1c08-ade7-11e4-89d3-123b93f75cba",
+    "b42e3882-ade7-11e4-89d3-123b93f75cba",
+]
+

 @dataclasses.dataclass
 class Discovery:
@@ -147,6 +154,9 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
             if MFCT_ID not in discovery_info.manufacturer_data:
                 continue

+            if not any(uuid in SERVICE_UUIDS for uuid in discovery_info.service_uuids):
+                continue
+
             try:
                 device = await self._get_device_data(discovery_info)
             except AirthingsDeviceUpdateError:
@@ -242,7 +242,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

     # 1 -> 2: One geography per config entry
     if version == 1:
-        version = entry.version = 2
+        version = 2

         # Update the config entry to only include the first geography (there is always
         # guaranteed to be at least one):
@@ -255,6 +255,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             unique_id=first_id,
             title=f"Cloud API ({first_id})",
             data={CONF_API_KEY: entry.data[CONF_API_KEY], **first_geography},
+            version=version,
         )

         # For any geographies that remain, create a new config entry for each one:
@@ -379,7 +380,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             },
         )
     else:
-        entry.version = version
+        hass.config_entries.async_update_entry(entry, version=version)

     LOGGER.info("Migration to version %s successful", version)

new file: homeassistant/components/airvisual/icons.json
@@ -0,0 +1,12 @@
+{
+  "entity": {
+    "sensor": {
+      "pollutant_level": {
+        "default": "mdi:gauge"
+      },
+      "pollutant_label": {
+        "default": "mdi:chemical-weapon"
+      }
+    }
+  }
+}
@@ -5,6 +5,7 @@
   "config_flow": true,
   "dependencies": ["airvisual_pro"],
   "documentation": "https://www.home-assistant.io/integrations/airvisual",
+  "import_executor": true,
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["pyairvisual", "pysmb"],
@@ -42,7 +42,6 @@ GEOGRAPHY_SENSOR_DESCRIPTIONS = (
     SensorEntityDescription(
         key=SENSOR_KIND_LEVEL,
         name="Air pollution level",
-        icon="mdi:gauge",
         device_class=SensorDeviceClass.ENUM,
         options=[
             "good",
@@ -63,7 +62,6 @@ GEOGRAPHY_SENSOR_DESCRIPTIONS = (
     SensorEntityDescription(
         key=SENSOR_KIND_POLLUTANT,
         name="Main pollutant",
-        icon="mdi:chemical-weapon",
         device_class=SensorDeviceClass.ENUM,
         options=["co", "n2", "o3", "p1", "p2", "s2"],
         translation_key="pollutant_label",
@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airzone",
   "iot_class": "local_polling",
   "loggers": ["aioairzone"],
-  "requirements": ["aioairzone==0.7.4"]
+  "requirements": ["aioairzone==0.7.6"]
 }
@@ -24,6 +24,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     options = ConnectionOptions(
         entry.data[CONF_USERNAME],
         entry.data[CONF_PASSWORD],
+        True,
     )

     airzone = AirzoneCloudApi(aiohttp_client.async_get_clientsession(hass), options)
@@ -7,6 +7,7 @@ from typing import Any, Final
 from aioairzone_cloud.const import (
     AZD_ACTIVE,
     AZD_AIDOOS,
+    AZD_AQ_ACTIVE,
     AZD_ERRORS,
     AZD_PROBLEMS,
     AZD_SYSTEMS,
@@ -76,6 +77,10 @@ ZONE_BINARY_SENSOR_TYPES: Final[tuple[AirzoneBinarySensorEntityDescription, ...]
         device_class=BinarySensorDeviceClass.RUNNING,
         key=AZD_ACTIVE,
     ),
+    AirzoneBinarySensorEntityDescription(
+        key=AZD_AQ_ACTIVE,
+        translation_key="air_quality_active",
+    ),
     AirzoneBinarySensorEntityDescription(
         attributes={
             "warnings": AZD_WARNINGS,
@@ -94,6 +94,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
                 ConnectionOptions(
                     user_input[CONF_USERNAME],
                     user_input[CONF_PASSWORD],
+                    False,
                 ),
             )

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
   "iot_class": "cloud_polling",
   "loggers": ["aioairzone_cloud"],
-  "requirements": ["aioairzone-cloud==0.3.8"]
+  "requirements": ["aioairzone-cloud==0.4.5"]
 }
@@ -5,6 +5,10 @@ from typing import Any, Final

 from aioairzone_cloud.const import (
     AZD_AIDOOS,
+    AZD_AQ_INDEX,
+    AZD_AQ_PM_1,
+    AZD_AQ_PM_2P5,
+    AZD_AQ_PM_10,
     AZD_HUMIDITY,
     AZD_TEMP,
     AZD_WEBSERVERS,
@@ -20,6 +24,7 @@ from homeassistant.components.sensor import (
 )
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
+    CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
     PERCENTAGE,
     SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
     EntityCategory,
@@ -58,6 +63,29 @@ WEBSERVER_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
 )

 ZONE_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
+    SensorEntityDescription(
+        device_class=SensorDeviceClass.AQI,
+        key=AZD_AQ_INDEX,
+        state_class=SensorStateClass.MEASUREMENT,
+    ),
+    SensorEntityDescription(
+        device_class=SensorDeviceClass.PM1,
+        key=AZD_AQ_PM_1,
+        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
+        state_class=SensorStateClass.MEASUREMENT,
+    ),
+    SensorEntityDescription(
+        device_class=SensorDeviceClass.PM25,
+        key=AZD_AQ_PM_2P5,
+        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
+        state_class=SensorStateClass.MEASUREMENT,
+    ),
+    SensorEntityDescription(
+        device_class=SensorDeviceClass.PM10,
+        key=AZD_AQ_PM_10,
+        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
+        state_class=SensorStateClass.MEASUREMENT,
+    ),
     SensorEntityDescription(
         device_class=SensorDeviceClass.TEMPERATURE,
         key=AZD_TEMP,
@@ -15,5 +15,12 @@
         }
       }
     }
+  },
+  "entity": {
+    "binary_sensor": {
+      "air_quality_active": {
+        "name": "Air Quality active"
+      }
+    }
   }
 }
@@ -1,5 +1,4 @@
 """The aladdin_connect component."""
-import asyncio
 import logging
 from typing import Final

@@ -29,7 +28,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     )
     try:
         await acc.login()
-    except (ClientError, asyncio.TimeoutError, Aladdin.ConnectionError) as ex:
+    except (ClientError, TimeoutError, Aladdin.ConnectionError) as ex:
         raise ConfigEntryNotReady("Can not connect to host") from ex
     except Aladdin.InvalidPasswordError as ex:
         raise ConfigEntryAuthFailed("Incorrect Password") from ex
@@ -1,7 +1,6 @@
 """Config flow for Aladdin Connect cover integration."""
 from __future__ import annotations

-import asyncio
 from collections.abc import Mapping
 from typing import Any

@@ -42,7 +41,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
     )
     try:
         await acc.login()
-    except (ClientError, asyncio.TimeoutError, Aladdin.ConnectionError) as ex:
+    except (ClientError, TimeoutError, Aladdin.ConnectionError) as ex:
         raise ex

     except Aladdin.InvalidPasswordError as ex:
@@ -81,7 +80,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             except InvalidAuth:
                 errors["base"] = "invalid_auth"

-            except (ClientError, asyncio.TimeoutError, Aladdin.ConnectionError):
+            except (ClientError, TimeoutError, Aladdin.ConnectionError):
                 errors["base"] = "cannot_connect"

             else:
@@ -117,7 +116,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             except InvalidAuth:
                 errors["base"] = "invalid_auth"

-            except (ClientError, asyncio.TimeoutError, Aladdin.ConnectionError):
+            except (ClientError, TimeoutError, Aladdin.ConnectionError):
                 errors["base"] = "cannot_connect"

             else:
new file: homeassistant/components/alarmdecoder/icons.json
@@ -0,0 +1,13 @@
+{
+  "entity": {
+    "sensor": {
+      "alarm_panel_display": {
+        "default": "mdi:alarm-check"
+      }
+    }
+  },
+  "services": {
+    "alarm_keypress": "mdi:dialpad",
+    "alarm_toggle_chime": "mdi:abc"
+  }
+}
@@ -20,7 +20,7 @@ async def async_setup_entry(
 class AlarmDecoderSensor(SensorEntity):
     """Representation of an AlarmDecoder keypad."""

-    _attr_icon = "mdi:alarm-check"
+    _attr_translation_key = "alarm_panel_display"
     _attr_name = "Alarm Panel Display"
     _attr_should_poll = False

@@ -122,7 +122,7 @@ class Auth:
                 allow_redirects=True,
             )

-        except (asyncio.TimeoutError, aiohttp.ClientError):
+        except (TimeoutError, aiohttp.ClientError):
             _LOGGER.error("Timeout calling LWA to get auth token")
             return None

@@ -29,12 +29,20 @@ class AbstractConfig(ABC):
         """Initialize abstract config."""
         self.hass = hass
         self._enable_proactive_mode_lock = asyncio.Lock()
+        self._on_deinitialize: list[CALLBACK_TYPE] = []

     async def async_initialize(self) -> None:
         """Perform async initialization of config."""
         self._store = AlexaConfigStore(self.hass)
         await self._store.async_load()

+    @callback
+    def async_deinitialize(self) -> None:
+        """Remove listeners."""
+        _LOGGER.debug("async_deinitialize")
+        while self._on_deinitialize:
+            self._on_deinitialize.pop()()
+
     @property
     def supports_auth(self) -> bool:
         """Return if config supports auth."""
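The new _on_deinitialize list and async_deinitialize() in the hunk above follow the usual "collect the unsubscribe callbacks, then pop and call them on teardown" pattern. A small standalone sketch of the same idea, independent of the Alexa classes:

from collections.abc import Callable

CALLBACK_TYPE = Callable[[], None]


class ExampleConfig:
    """Tracks listener removal callbacks so they can all be undone at once."""

    def __init__(self) -> None:
        self._on_deinitialize: list[CALLBACK_TYPE] = []

    def add_listener(self, remove: CALLBACK_TYPE) -> None:
        # Store whatever "unsubscribe" callable a listener registration returned.
        self._on_deinitialize.append(remove)

    def deinitialize(self) -> None:
        """Remove listeners, newest first, leaving the list empty."""
        while self._on_deinitialize:
            self._on_deinitialize.pop()()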
@@ -1,7 +1,6 @@
 """Alexa state report code."""
 from __future__ import annotations

-import asyncio
 from asyncio import timeout
 from http import HTTPStatus
 import json
@@ -375,7 +374,7 @@ async def async_send_changereport_message(
             allow_redirects=True,
         )

-    except (asyncio.TimeoutError, aiohttp.ClientError):
+    except (TimeoutError, aiohttp.ClientError):
         _LOGGER.error("Timeout sending report to Alexa for %s", alexa_entity.entity_id)
         return

@@ -531,7 +530,7 @@ async def async_send_doorbell_event_message(
             allow_redirects=True,
         )

-    except (asyncio.TimeoutError, aiohttp.ClientError):
+    except (TimeoutError, aiohttp.ClientError):
         _LOGGER.error("Timeout sending report to Alexa for %s", alexa_entity.entity_id)
         return

new file: homeassistant/components/amberelectric/icons.json
@@ -0,0 +1,18 @@
+{
+  "entity": {
+    "sensor": {
+      "general": {
+        "default": "mdi:transmission-tower"
+      },
+      "controlled_load": {
+        "default": "mdi:clock-outline"
+      },
+      "feed_in": {
+        "default": "mdi:solar-power"
+      },
+      "renewables": {
+        "default": "mdi:solar-power"
+      }
+    }
+  }
+}
@@ -27,12 +27,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
 from .const import ATTRIBUTION, DOMAIN
 from .coordinator import AmberUpdateCoordinator, normalize_descriptor

-ICONS = {
-    "general": "mdi:transmission-tower",
-    "controlled_load": "mdi:clock-outline",
-    "feed_in": "mdi:solar-power",
-}
-
 UNIT = f"{CURRENCY_DOLLAR}/{UnitOfEnergy.KILO_WATT_HOUR}"


@@ -219,7 +213,7 @@ async def async_setup_entry(
             name=f"{entry.title} - {friendly_channel_type(channel_type)} Price",
             native_unit_of_measurement=UNIT,
             state_class=SensorStateClass.MEASUREMENT,
-            icon=ICONS[channel_type],
+            translation_key=channel_type,
         )
         entities.append(AmberPriceSensor(coordinator, description, channel_type))

@@ -230,7 +224,7 @@ async def async_setup_entry(
                 f"{entry.title} - {friendly_channel_type(channel_type)} Price"
                 " Descriptor"
             ),
-            icon=ICONS[channel_type],
+            translation_key=channel_type,
         )
         entities.append(
             AmberPriceDescriptorSensor(coordinator, description, channel_type)
@@ -242,7 +236,7 @@ async def async_setup_entry(
             name=f"{entry.title} - {friendly_channel_type(channel_type)} Forecast",
             native_unit_of_measurement=UNIT,
             state_class=SensorStateClass.MEASUREMENT,
-            icon=ICONS[channel_type],
+            translation_key=channel_type,
         )
         entities.append(AmberForecastSensor(coordinator, description, channel_type))

@@ -251,7 +245,7 @@ async def async_setup_entry(
         name=f"{entry.title} - Renewables",
         native_unit_of_measurement=PERCENTAGE,
         state_class=SensorStateClass.MEASUREMENT,
-        icon="mdi:solar-power",
+        translation_key="renewables",
     )
     entities.append(AmberGridSensor(coordinator, renewables_description))

@@ -4,7 +4,7 @@ import voluptuous as vol
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET, Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers import config_validation as cv, issue_registry as ir
 from homeassistant.helpers.typing import ConfigType

 from . import config_flow
@@ -41,5 +41,18 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Ambiclimate from a config entry."""
+    ir.async_create_issue(
+        hass,
+        DOMAIN,
+        DOMAIN,
+        breaks_in_ha_version="2024.4.0",
+        is_fixable=False,
+        severity=ir.IssueSeverity.WARNING,
+        translation_key="integration_removed",
+        translation_placeholders={
+            "entries": "/config/integrations/integration/ambiclimate",
+        },
+    )
+
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
     return True
new file: homeassistant/components/ambiclimate/icons.json
@@ -0,0 +1,7 @@
+{
+  "services": {
+    "set_comfort_mode": "mdi:auto-mode",
+    "send_comfort_feedback": "mdi:thermometer-checked",
+    "set_temperature_mode": "mdi:thermometer"
+  }
+}
@@ -19,6 +19,12 @@
       "access_token": "Unknown error generating an access token."
     }
   },
+  "issues": {
+    "integration_removed": {
+      "title": "The Ambiclimate integration has been deprecated and will be removed",
+      "description": "All Ambiclimate services will be terminated, effective March 31, 2024, as Ambi Labs winds down business operations, and the Ambiclimate integration will be removed from Home Assistant.\n\nTo resolve this issue, please remove the integration entries from your Home Assistant setup. [Click here to see your existing Logi Circle integration entries]({entries})."
+    }
+  },
   "services": {
     "set_comfort_mode": {
       "name": "Set comfort mode",
@@ -40,7 +46,7 @@
       },
       "value": {
         "name": "Comfort value",
-        "description": "Send any of the following comfort values: too_hot, too_warm, bit_warm, comfortable, bit_cold, too_cold, freezing\n."
+        "description": "Send any of the following comfort values: too_hot, too_warm, bit_warm, comfortable, bit_cold, too_cold, freezing."
       }
     }
   },
@@ -111,7 +111,8 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         en_reg = er.async_get(hass)
         en_reg.async_clear_config_entry(entry.entry_id)

-        version = entry.version = 2
+        version = 2
+        hass.config_entries.async_update_entry(entry, version=version)

     LOGGER.info("Migration to version %s successful", version)

new file: homeassistant/components/amp_motorization/__init__.py
@@ -0,0 +1 @@
+"""Virtual integration: AMP motorization."""
@@ -173,6 +173,7 @@ class Analytics:

     async def send_analytics(self, _: datetime | None = None) -> None:
         """Send analytics."""
+        hass = self.hass
         supervisor_info = None
         operating_system_info: dict[str, Any] = {}

@@ -185,10 +186,10 @@ class Analytics:
         await self._store.async_save(dataclass_asdict(self._data))

         if self.supervisor:
-            supervisor_info = hassio.get_supervisor_info(self.hass)
-            operating_system_info = hassio.get_os_info(self.hass) or {}
+            supervisor_info = hassio.get_supervisor_info(hass)
+            operating_system_info = hassio.get_os_info(hass) or {}

-        system_info = await async_get_system_info(self.hass)
+        system_info = await async_get_system_info(hass)
         integrations = []
         custom_integrations = []
         addons = []
@@ -214,10 +215,10 @@ class Analytics:
         if self.preferences.get(ATTR_USAGE, False) or self.preferences.get(
             ATTR_STATISTICS, False
         ):
-            ent_reg = er.async_get(self.hass)
+            ent_reg = er.async_get(hass)

             try:
-                yaml_configuration = await conf_util.async_hass_config_yaml(self.hass)
+                yaml_configuration = await conf_util.async_hass_config_yaml(hass)
             except HomeAssistantError as err:
                 LOGGER.error(err)
                 return
@@ -229,8 +230,8 @@ class Analytics:
                 if not entity.disabled
             }

-            domains = async_get_loaded_integrations(self.hass)
-            configured_integrations = await async_get_integrations(self.hass, domains)
+            domains = async_get_loaded_integrations(hass)
+            configured_integrations = await async_get_integrations(hass, domains)
             enabled_domains = set(configured_integrations)

             for integration in configured_integrations.values():
@@ -261,7 +262,7 @@ class Analytics:
         if supervisor_info is not None:
             installed_addons = await asyncio.gather(
                 *(
-                    hassio.async_get_addon_info(self.hass, addon[ATTR_SLUG])
+                    hassio.async_get_addon_info(hass, addon[ATTR_SLUG])
                     for addon in supervisor_info[ATTR_ADDONS]
                 )
             )
@@ -276,7 +277,7 @@ class Analytics:
             )

         if self.preferences.get(ATTR_USAGE, False):
-            payload[ATTR_CERTIFICATE] = self.hass.http.ssl_certificate is not None
+            payload[ATTR_CERTIFICATE] = hass.http.ssl_certificate is not None
             payload[ATTR_INTEGRATIONS] = integrations
             payload[ATTR_CUSTOM_INTEGRATIONS] = custom_integrations
             if supervisor_info is not None:
@@ -284,11 +285,11 @@ class Analytics:

         if ENERGY_DOMAIN in enabled_domains:
             payload[ATTR_ENERGY] = {
-                ATTR_CONFIGURED: await energy_is_configured(self.hass)
+                ATTR_CONFIGURED: await energy_is_configured(hass)
             }

         if RECORDER_DOMAIN in enabled_domains:
-            instance = get_recorder_instance(self.hass)
+            instance = get_recorder_instance(hass)
             engine = instance.database_engine
             if engine and engine.version is not None:
                 payload[ATTR_RECORDER] = {
@@ -297,9 +298,9 @@ class Analytics:
             }

         if self.preferences.get(ATTR_STATISTICS, False):
-            payload[ATTR_STATE_COUNT] = len(self.hass.states.async_all())
-            payload[ATTR_AUTOMATION_COUNT] = len(
-                self.hass.states.async_all(AUTOMATION_DOMAIN)
+            payload[ATTR_STATE_COUNT] = hass.states.async_entity_ids_count()
+            payload[ATTR_AUTOMATION_COUNT] = hass.states.async_entity_ids_count(
+                AUTOMATION_DOMAIN
             )
             payload[ATTR_INTEGRATION_COUNT] = len(integrations)
             if supervisor_info is not None:
@@ -307,7 +308,7 @@ class Analytics:
             payload[ATTR_USER_COUNT] = len(
                 [
                     user
-                    for user in await self.hass.auth.async_get_users()
+                    for user in await hass.auth.async_get_users()
                     if not user.system_generated
                 ]
             )
@@ -329,7 +330,7 @@ class Analytics:
                     response.status,
                     self.endpoint,
                 )
-        except asyncio.TimeoutError:
+        except TimeoutError:
             LOGGER.error("Timeout sending analytics to %s", ANALYTICS_ENDPOINT_URL)
         except aiohttp.ClientError as err:
             LOGGER.error(
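The statistics hunk above swaps len(hass.states.async_all(...)) for hass.states.async_entity_ids_count(...), which asks the state machine for a count instead of materializing every State object first. A hedged sketch of the cheaper form, meant to run inside Home Assistant where a live hass instance is available:

from homeassistant.core import HomeAssistant


def automation_count(hass: HomeAssistant) -> int:
    """Count automation entities without building State objects."""
    # Equivalent to len(hass.states.async_all("automation")), but avoids
    # creating the intermediate list of State instances.
    return hass.states.async_entity_ids_count("automation")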
@@ -5,6 +5,7 @@
   "codeowners": ["@home-assistant/core", "@ludeeus"],
   "dependencies": ["api", "websocket_api"],
   "documentation": "https://www.home-assistant.io/integrations/analytics",
+  "import_executor": true,
   "integration_type": "system",
   "iot_class": "cloud_push",
   "quality_scale": "internal"
@ -4,6 +4,7 @@
|
|||||||
"codeowners": ["@joostlek"],
|
"codeowners": ["@joostlek"],
|
||||||
"config_flow": true,
|
"config_flow": true,
|
||||||
"documentation": "https://www.home-assistant.io/integrations/analytics_insights",
|
"documentation": "https://www.home-assistant.io/integrations/analytics_insights",
|
||||||
|
"import_executor": true,
|
||||||
"integration_type": "service",
|
"integration_type": "service",
|
||||||
"iot_class": "cloud_polling",
|
"iot_class": "cloud_polling",
|
||||||
"loggers": ["python_homeassistant_analytics"],
|
"loggers": ["python_homeassistant_analytics"],
|
||||||
|
homeassistant/components/android_ip_webcam/icons.json (Normal file, 62 lines)

@@ -0,0 +1,62 @@
+{
+  "entity": {
+    "sensor": {
+      "audio_connections": {
+        "default": "mdi:speaker"
+      },
+      "battery_temperature": {
+        "default": "mdi:thermometer"
+      },
+      "light": {
+        "default": "mdi:flashlight"
+      },
+      "motion": {
+        "default": "mdi:run"
+      },
+      "pressure": {
+        "default": "mdi:gauge"
+      },
+      "proximity": {
+        "default": "mdi:map-marker-radius"
+      },
+      "sound": {
+        "default": "mdi:speaker"
+      },
+      "video_connections": {
+        "default": "mdi:eye"
+      }
+    },
+    "switch": {
+      "exposure_lock": {
+        "default": "mdi:camera"
+      },
+      "ffc": {
+        "default": "mdi:camera-front-variant"
+      },
+      "focus": {
+        "default": "mdi:image-filter-center-focus"
+      },
+      "gps_active": {
+        "default": "mdi:crosshairs-gps"
+      },
+      "motion_detect": {
+        "default": "mdi:flash"
+      },
+      "night_vision": {
+        "default": "mdi:weather-night"
+      },
+      "overlay": {
+        "default": "mdi:monitor"
+      },
+      "torch": {
+        "default": "mdi:white-balance-sunny"
+      },
+      "whitebalance_lock": {
+        "default": "mdi:white-balance-auto"
+      },
+      "video_recording": {
+        "default": "mdi:record-rec"
+      }
+    }
+  }
+}
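Note: this icons.json pairs with the sensor.py and switch.py hunks that follow: each entity description drops its hard-coded `icon=` and gains a `translation_key`, and the icon is then resolved from icons.json under entity -> platform -> translation key. A rough sketch of the pattern with a hypothetical `wind_speed` sensor (the names here are illustrative, not taken from the commit):

    from homeassistant.components.sensor import SensorEntityDescription

    # Hypothetical icons.json fragment the description below would map to:
    # { "entity": { "sensor": { "wind_speed": { "default": "mdi:weather-windy" } } } }

    WIND_SPEED = SensorEntityDescription(
        key="wind_speed",
        translation_key="wind_speed",  # icon comes from icons.json, not icon="mdi:..."
    )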
homeassistant/components/android_ip_webcam/sensor.py

@@ -42,8 +42,8 @@ class AndroidIPWebcamSensorEntityDescription(
 SENSOR_TYPES: tuple[AndroidIPWebcamSensorEntityDescription, ...] = (
 AndroidIPWebcamSensorEntityDescription(
 key="audio_connections",
+translation_key="audio_connections",
 name="Audio connections",
-icon="mdi:speaker",
 state_class=SensorStateClass.TOTAL,
 entity_category=EntityCategory.DIAGNOSTIC,
 value_fn=lambda ipcam: ipcam.status_data.get("audio_connections"),
@@ -59,8 +59,8 @@ SENSOR_TYPES: tuple[AndroidIPWebcamSensorEntityDescription, ...] = (
 ),
 AndroidIPWebcamSensorEntityDescription(
 key="battery_temp",
+translation_key="battery_temperature",
 name="Battery temperature",
-icon="mdi:thermometer",
 state_class=SensorStateClass.MEASUREMENT,
 entity_category=EntityCategory.DIAGNOSTIC,
 value_fn=lambda ipcam: ipcam.get_sensor_value("battery_temp"),
@@ -76,48 +76,48 @@ SENSOR_TYPES: tuple[AndroidIPWebcamSensorEntityDescription, ...] = (
 ),
 AndroidIPWebcamSensorEntityDescription(
 key="light",
+translation_key="light",
 name="Light level",
-icon="mdi:flashlight",
 state_class=SensorStateClass.MEASUREMENT,
 value_fn=lambda ipcam: ipcam.get_sensor_value("light"),
 unit_fn=lambda ipcam: ipcam.get_sensor_unit("light"),
 ),
 AndroidIPWebcamSensorEntityDescription(
 key="motion",
+translation_key="motion",
 name="Motion",
-icon="mdi:run",
 state_class=SensorStateClass.MEASUREMENT,
 value_fn=lambda ipcam: ipcam.get_sensor_value("motion"),
 unit_fn=lambda ipcam: ipcam.get_sensor_unit("motion"),
 ),
 AndroidIPWebcamSensorEntityDescription(
 key="pressure",
+translation_key="pressure",
 name="Pressure",
-icon="mdi:gauge",
 state_class=SensorStateClass.MEASUREMENT,
 value_fn=lambda ipcam: ipcam.get_sensor_value("pressure"),
 unit_fn=lambda ipcam: ipcam.get_sensor_unit("pressure"),
 ),
 AndroidIPWebcamSensorEntityDescription(
 key="proximity",
+translation_key="proximity",
 name="Proximity",
-icon="mdi:map-marker-radius",
 state_class=SensorStateClass.MEASUREMENT,
 value_fn=lambda ipcam: ipcam.get_sensor_value("proximity"),
 unit_fn=lambda ipcam: ipcam.get_sensor_unit("proximity"),
 ),
 AndroidIPWebcamSensorEntityDescription(
 key="sound",
+translation_key="sound",
 name="Sound",
-icon="mdi:speaker",
 state_class=SensorStateClass.MEASUREMENT,
 value_fn=lambda ipcam: ipcam.get_sensor_value("sound"),
 unit_fn=lambda ipcam: ipcam.get_sensor_unit("sound"),
 ),
 AndroidIPWebcamSensorEntityDescription(
 key="video_connections",
+translation_key="video_connections",
 name="Video connections",
-icon="mdi:eye",
 state_class=SensorStateClass.TOTAL,
 entity_category=EntityCategory.DIAGNOSTIC,
 value_fn=lambda ipcam: ipcam.status_data.get("video_connections"),
homeassistant/components/android_ip_webcam/switch.py

@@ -36,80 +36,80 @@ class AndroidIPWebcamSwitchEntityDescription(
 SWITCH_TYPES: tuple[AndroidIPWebcamSwitchEntityDescription, ...] = (
 AndroidIPWebcamSwitchEntityDescription(
 key="exposure_lock",
+translation_key="exposure_lock",
 name="Exposure lock",
-icon="mdi:camera",
 entity_category=EntityCategory.CONFIG,
 on_func=lambda ipcam: ipcam.change_setting("exposure_lock", True),
 off_func=lambda ipcam: ipcam.change_setting("exposure_lock", False),
 ),
 AndroidIPWebcamSwitchEntityDescription(
 key="ffc",
+translation_key="ffc",
 name="Front-facing camera",
-icon="mdi:camera-front-variant",
 entity_category=EntityCategory.CONFIG,
 on_func=lambda ipcam: ipcam.change_setting("ffc", True),
 off_func=lambda ipcam: ipcam.change_setting("ffc", False),
 ),
 AndroidIPWebcamSwitchEntityDescription(
 key="focus",
+translation_key="focus",
 name="Focus",
-icon="mdi:image-filter-center-focus",
 entity_category=EntityCategory.CONFIG,
 on_func=lambda ipcam: ipcam.focus(activate=True),
 off_func=lambda ipcam: ipcam.focus(activate=False),
 ),
 AndroidIPWebcamSwitchEntityDescription(
 key="gps_active",
+translation_key="gps_active",
 name="GPS active",
-icon="mdi:crosshairs-gps",
 entity_category=EntityCategory.CONFIG,
 on_func=lambda ipcam: ipcam.change_setting("gps_active", True),
 off_func=lambda ipcam: ipcam.change_setting("gps_active", False),
 ),
 AndroidIPWebcamSwitchEntityDescription(
 key="motion_detect",
+translation_key="motion_detect",
 name="Motion detection",
-icon="mdi:flash",
 entity_category=EntityCategory.CONFIG,
 on_func=lambda ipcam: ipcam.change_setting("motion_detect", True),
 off_func=lambda ipcam: ipcam.change_setting("motion_detect", False),
 ),
 AndroidIPWebcamSwitchEntityDescription(
 key="night_vision",
+translation_key="night_vision",
 name="Night vision",
-icon="mdi:weather-night",
 entity_category=EntityCategory.CONFIG,
 on_func=lambda ipcam: ipcam.change_setting("night_vision", True),
 off_func=lambda ipcam: ipcam.change_setting("night_vision", False),
 ),
 AndroidIPWebcamSwitchEntityDescription(
 key="overlay",
+translation_key="overlay",
 name="Overlay",
-icon="mdi:monitor",
 entity_category=EntityCategory.CONFIG,
 on_func=lambda ipcam: ipcam.change_setting("overlay", True),
 off_func=lambda ipcam: ipcam.change_setting("overlay", False),
 ),
 AndroidIPWebcamSwitchEntityDescription(
 key="torch",
+translation_key="torch",
 name="Torch",
-icon="mdi:white-balance-sunny",
 entity_category=EntityCategory.CONFIG,
 on_func=lambda ipcam: ipcam.torch(activate=True),
 off_func=lambda ipcam: ipcam.torch(activate=False),
 ),
 AndroidIPWebcamSwitchEntityDescription(
 key="whitebalance_lock",
+translation_key="whitebalance_lock",
 name="White balance lock",
-icon="mdi:white-balance-auto",
 entity_category=EntityCategory.CONFIG,
 on_func=lambda ipcam: ipcam.change_setting("whitebalance_lock", True),
 off_func=lambda ipcam: ipcam.change_setting("whitebalance_lock", False),
 ),
 AndroidIPWebcamSwitchEntityDescription(
 key="video_recording",
+translation_key="video_recording",
 name="Video recording",
-icon="mdi:record-rec",
 entity_category=EntityCategory.CONFIG,
 on_func=lambda ipcam: ipcam.record(record=True),
 off_func=lambda ipcam: ipcam.record(record=False),
homeassistant/components/androidtv/entity.py (Normal file, 145 lines)

@@ -0,0 +1,145 @@
+"""Base AndroidTV Entity."""
+from __future__ import annotations
+
+from collections.abc import Awaitable, Callable, Coroutine
+import functools
+import logging
+from typing import Any, Concatenate, ParamSpec, TypeVar
+
+from androidtv.exceptions import LockNotAcquiredException
+from androidtv.setup_async import AndroidTVAsync, FireTVAsync
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import (
+    ATTR_CONNECTIONS,
+    ATTR_IDENTIFIERS,
+    ATTR_MANUFACTURER,
+    ATTR_MODEL,
+    ATTR_SW_VERSION,
+    CONF_HOST,
+    CONF_NAME,
+)
+from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
+from homeassistant.helpers.entity import Entity
+
+from . import ADB_PYTHON_EXCEPTIONS, ADB_TCP_EXCEPTIONS, get_androidtv_mac
+from .const import DEVICE_ANDROIDTV, DOMAIN
+
+PREFIX_ANDROIDTV = "Android TV"
+PREFIX_FIRETV = "Fire TV"
+
+_LOGGER = logging.getLogger(__name__)
+
+_ADBDeviceT = TypeVar("_ADBDeviceT", bound="AndroidTVEntity")
+_R = TypeVar("_R")
+_P = ParamSpec("_P")
+
+_FuncType = Callable[Concatenate[_ADBDeviceT, _P], Awaitable[_R]]
+_ReturnFuncType = Callable[Concatenate[_ADBDeviceT, _P], Coroutine[Any, Any, _R | None]]
+
+
+def adb_decorator(
+    override_available: bool = False,
+) -> Callable[[_FuncType[_ADBDeviceT, _P, _R]], _ReturnFuncType[_ADBDeviceT, _P, _R]]:
+    """Wrap ADB methods and catch exceptions.
+
+    Allows for overriding the available status of the ADB connection via the
+    `override_available` parameter.
+    """
+
+    def _adb_decorator(
+        func: _FuncType[_ADBDeviceT, _P, _R],
+    ) -> _ReturnFuncType[_ADBDeviceT, _P, _R]:
+        """Wrap the provided ADB method and catch exceptions."""
+
+        @functools.wraps(func)
+        async def _adb_exception_catcher(
+            self: _ADBDeviceT, *args: _P.args, **kwargs: _P.kwargs
+        ) -> _R | None:
+            """Call an ADB-related method and catch exceptions."""
+            if not self.available and not override_available:
+                return None
+
+            try:
+                return await func(self, *args, **kwargs)
+            except LockNotAcquiredException:
+                # If the ADB lock could not be acquired, skip this command
+                _LOGGER.info(
+                    (
+                        "ADB command %s not executed because the connection is"
+                        " currently in use"
+                    ),
+                    func.__name__,
+                )
+                return None
+            except self.exceptions as err:
+                _LOGGER.error(
+                    (
+                        "Failed to execute an ADB command. ADB connection re-"
+                        "establishing attempt in the next update. Error: %s"
+                    ),
+                    err,
+                )
+                await self.aftv.adb_close()
+                # pylint: disable-next=protected-access
+                self._attr_available = False
+                return None
+            except Exception:
+                # An unforeseen exception occurred. Close the ADB connection so that
+                # it doesn't happen over and over again, then raise the exception.
+                await self.aftv.adb_close()
+                # pylint: disable-next=protected-access
+                self._attr_available = False
+                raise
+
+        return _adb_exception_catcher
+
+    return _adb_decorator
+
+
+class AndroidTVEntity(Entity):
+    """Defines a base AndroidTV entity."""
+
+    _attr_has_entity_name = True
+
+    def __init__(
+        self,
+        aftv: AndroidTVAsync | FireTVAsync,
+        entry: ConfigEntry,
+        entry_data: dict[str, Any],
+    ) -> None:
+        """Initialize the AndroidTV base entity."""
+        self.aftv = aftv
+        self._attr_unique_id = entry.unique_id
+        self._entry_data = entry_data
+
+        device_class = aftv.DEVICE_CLASS
+        device_type = (
+            PREFIX_ANDROIDTV if device_class == DEVICE_ANDROIDTV else PREFIX_FIRETV
+        )
+        # CONF_NAME may be present in entry.data for configuration imported from YAML
+        device_name = entry.data.get(
+            CONF_NAME, f"{device_type} {entry.data[CONF_HOST]}"
+        )
+        info = aftv.device_properties
+        model = info.get(ATTR_MODEL)
+        self._attr_device_info = DeviceInfo(
+            model=f"{model} ({device_type})" if model else device_type,
+            name=device_name,
+        )
+        if self.unique_id:
+            self._attr_device_info[ATTR_IDENTIFIERS] = {(DOMAIN, self.unique_id)}
+        if manufacturer := info.get(ATTR_MANUFACTURER):
+            self._attr_device_info[ATTR_MANUFACTURER] = manufacturer
+        if sw_version := info.get(ATTR_SW_VERSION):
+            self._attr_device_info[ATTR_SW_VERSION] = sw_version
+        if mac := get_androidtv_mac(info):
+            self._attr_device_info[ATTR_CONNECTIONS] = {(CONNECTION_NETWORK_MAC, mac)}
+
+        # ADB exceptions to catch
+        if not aftv.adb_server_ip:
+            # Using "adb_shell" (Python ADB implementation)
+            self.exceptions = ADB_PYTHON_EXCEPTIONS
+        else:
+            # Using "pure-python-adb" (communicate with ADB server)
+            self.exceptions = ADB_TCP_EXCEPTIONS
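Note: the media_player.py hunks below consume this new module via `from .entity import AndroidTVEntity, adb_decorator`. As a rough sketch of the usage pattern (the subclass and method names here are illustrative, not taken from the commit):

    from .entity import AndroidTVEntity, adb_decorator


    class ExampleAndroidTVEntity(AndroidTVEntity):
        """Hypothetical entity built on the shared base class."""

        @adb_decorator()
        async def async_run_check(self) -> str | None:
            # Any ADB failure is caught by the decorator, which closes the
            # connection and flips the entity to unavailable instead of raising.
            return await self.aftv.adb_shell("getprop ro.product.model")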
homeassistant/components/androidtv/icons.json (Normal file, 8 lines)

@@ -0,0 +1,8 @@
+{
+  "services": {
+    "adb_command": "mdi:console",
+    "download": "mdi:download",
+    "upload": "mdi:upload",
+    "learn_sendevent": "mdi:remote"
+  }
+}
homeassistant/components/androidtv/media_player.py

@@ -1,15 +1,12 @@
 """Support for functionality to interact with Android / Fire TV devices."""
 from __future__ import annotations

-from collections.abc import Awaitable, Callable, Coroutine
 from datetime import timedelta
-import functools
 import hashlib
 import logging
-from typing import Any, Concatenate, ParamSpec, TypeVar
+from typing import Any

 from androidtv.constants import APPS, KEYS
-from androidtv.exceptions import LockNotAcquiredException
 from androidtv.setup_async import AndroidTVAsync, FireTVAsync
 import voluptuous as vol

@@ -21,23 +18,13 @@ from homeassistant.components.media_player import (
 MediaPlayerState,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import (
-ATTR_COMMAND,
-ATTR_CONNECTIONS,
-ATTR_MANUFACTURER,
-ATTR_MODEL,
-ATTR_SW_VERSION,
-CONF_HOST,
-CONF_NAME,
-)
+from homeassistant.const import ATTR_COMMAND
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation as cv, entity_platform
-from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.util import Throttle

-from . import ADB_PYTHON_EXCEPTIONS, ADB_TCP_EXCEPTIONS, get_androidtv_mac
 from .const import (
 ANDROID_DEV,
 ANDROID_DEV_OPT,
@@ -54,10 +41,7 @@ from .const import (
 DOMAIN,
 SIGNAL_CONFIG_ENTITY,
 )
+from .entity import AndroidTVEntity, adb_decorator
-_ADBDeviceT = TypeVar("_ADBDeviceT", bound="ADBDevice")
-_R = TypeVar("_R")
-_P = ParamSpec("_P")

 _LOGGER = logging.getLogger(__name__)

@@ -73,9 +57,6 @@ SERVICE_DOWNLOAD = "download"
 SERVICE_LEARN_SENDEVENT = "learn_sendevent"
 SERVICE_UPLOAD = "upload"

-PREFIX_ANDROIDTV = "Android TV"
-PREFIX_FIRETV = "Fire TV"
-
 # Translate from `AndroidTV` / `FireTV` reported state to HA state.
 ANDROIDTV_STATES = {
 "off": MediaPlayerState.OFF,
@@ -92,25 +73,11 @@ async def async_setup_entry(
 async_add_entities: AddEntitiesCallback,
 ) -> None:
 """Set up the Android Debug Bridge entity."""
-aftv: AndroidTVAsync | FireTVAsync = hass.data[DOMAIN][entry.entry_id][ANDROID_DEV]
+entry_data = hass.data[DOMAIN][entry.entry_id]
+aftv: AndroidTVAsync | FireTVAsync = entry_data[ANDROID_DEV]

 device_class = aftv.DEVICE_CLASS
-device_type = (
-PREFIX_ANDROIDTV if device_class == DEVICE_ANDROIDTV else PREFIX_FIRETV
-)
-# CONF_NAME may be present in entry.data for configuration imported from YAML
-device_name: str = entry.data.get(
-CONF_NAME, f"{device_type} {entry.data[CONF_HOST]}"
-)
-
-device_args = [
-aftv,
-device_name,
-device_type,
-entry.unique_id,
-entry.entry_id,
-hass.data[DOMAIN][entry.entry_id],
-]
+device_args = [aftv, entry, entry_data]

 async_add_entities(
 [
 AndroidTVDevice(*device_args)
@@ -146,108 +113,25 @@ async def async_setup_entry(
 )


-_FuncType = Callable[Concatenate[_ADBDeviceT, _P], Awaitable[_R]]
+class ADBDevice(AndroidTVEntity, MediaPlayerEntity):
-_ReturnFuncType = Callable[Concatenate[_ADBDeviceT, _P], Coroutine[Any, Any, _R | None]]
-
-
-def adb_decorator(
-override_available: bool = False,
-) -> Callable[[_FuncType[_ADBDeviceT, _P, _R]], _ReturnFuncType[_ADBDeviceT, _P, _R]]:
-"""Wrap ADB methods and catch exceptions.
-
-Allows for overriding the available status of the ADB connection via the
-`override_available` parameter.
-"""
-
-def _adb_decorator(
-func: _FuncType[_ADBDeviceT, _P, _R],
-) -> _ReturnFuncType[_ADBDeviceT, _P, _R]:
-"""Wrap the provided ADB method and catch exceptions."""
-
-@functools.wraps(func)
-async def _adb_exception_catcher(
-self: _ADBDeviceT, *args: _P.args, **kwargs: _P.kwargs
-) -> _R | None:
-"""Call an ADB-related method and catch exceptions."""
-if not self.available and not override_available:
-return None
-
-try:
-return await func(self, *args, **kwargs)
-except LockNotAcquiredException:
-# If the ADB lock could not be acquired, skip this command
-_LOGGER.info(
-(
-"ADB command %s not executed because the connection is"
-" currently in use"
-),
-func.__name__,
-)
-return None
-except self.exceptions as err:
-_LOGGER.error(
-(
-"Failed to execute an ADB command. ADB connection re-"
-"establishing attempt in the next update. Error: %s"
-),
-err,
-)
-await self.aftv.adb_close()
-# pylint: disable-next=protected-access
-self._attr_available = False
-return None
-except Exception:
-# An unforeseen exception occurred. Close the ADB connection so that
-# it doesn't happen over and over again, then raise the exception.
-await self.aftv.adb_close()
-# pylint: disable-next=protected-access
-self._attr_available = False
-raise
-
-return _adb_exception_catcher
-
-return _adb_decorator
-
-
-class ADBDevice(MediaPlayerEntity):
 """Representation of an Android or Fire TV device."""

 _attr_device_class = MediaPlayerDeviceClass.TV
-_attr_has_entity_name = True
 _attr_name = None

 def __init__(
 self,
 aftv: AndroidTVAsync | FireTVAsync,
-name: str,
+entry: ConfigEntry,
-dev_type: str,
-unique_id: str,
-entry_id: str,
 entry_data: dict[str, Any],
 ) -> None:
 """Initialize the Android / Fire TV device."""
-self.aftv = aftv
+super().__init__(aftv, entry, entry_data)
-self._attr_unique_id = unique_id
+self._entry_id = entry.entry_id
-self._entry_id = entry_id
-self._entry_data = entry_data

 self._media_image: tuple[bytes | None, str | None] = None, None
 self._attr_media_image_hash = None

-info = aftv.device_properties
-model = info.get(ATTR_MODEL)
-self._attr_device_info = DeviceInfo(
-identifiers={(DOMAIN, unique_id)},
-model=f"{model} ({dev_type})" if model else dev_type,
-name=name,
-)
-if manufacturer := info.get(ATTR_MANUFACTURER):
-self._attr_device_info[ATTR_MANUFACTURER] = manufacturer
-if sw_version := info.get(ATTR_SW_VERSION):
-self._attr_device_info[ATTR_SW_VERSION] = sw_version
-if mac := get_androidtv_mac(info):
-self._attr_device_info[ATTR_CONNECTIONS] = {(CONNECTION_NETWORK_MAC, mac)}
-
 self._app_id_to_name: dict[str, str] = {}
 self._app_name_to_id: dict[str, str] = {}
 self._get_sources = DEFAULT_GET_SOURCES
@@ -256,14 +140,6 @@ class ADBDevice(MediaPlayerEntity):
 self.turn_on_command: str | None = None
 self.turn_off_command: str | None = None

-# ADB exceptions to catch
-if not aftv.adb_server_ip:
-# Using "adb_shell" (Python ADB implementation)
-self.exceptions = ADB_PYTHON_EXCEPTIONS
-else:
-# Using "pure-python-adb" (communicate with ADB server)
-self.exceptions = ADB_TCP_EXCEPTIONS
-
 # Property attributes
 self._attr_extra_state_attributes = {
 ATTR_ADB_RESPONSE: None,
homeassistant/components/androidtv_remote/__init__.py

@@ -1,7 +1,6 @@
 """The Android TV Remote integration."""
 from __future__ import annotations

-import asyncio
 from asyncio import timeout
 import logging

@@ -50,7 +49,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
 except InvalidAuth as exc:
 # The Android TV is hard reset or the certificate and key files were deleted.
 raise ConfigEntryAuthFailed from exc
-except (CannotConnect, ConnectionClosed, asyncio.TimeoutError) as exc:
+except (CannotConnect, ConnectionClosed, TimeoutError) as exc:
 # The Android TV is network unreachable. Raise exception and let Home Assistant retry
 # later. If device gets a new IP address the zeroconf flow will update the config.
 raise ConfigEntryNotReady from exc
homeassistant/components/androidtv_remote/manifest.json

@@ -4,6 +4,7 @@
 "codeowners": ["@tronikos", "@Drafteed"],
 "config_flow": true,
 "documentation": "https://www.home-assistant.io/integrations/androidtv_remote",
+"import_executor": true,
 "integration_type": "device",
 "iot_class": "local_push",
 "loggers": ["androidtvremote2"],
homeassistant/components/anthemav/media_player.py

@@ -53,7 +53,6 @@ class AnthemAVR(MediaPlayerEntity):
 _attr_name = None
 _attr_should_poll = False
 _attr_device_class = MediaPlayerDeviceClass.RECEIVER
-_attr_icon = "mdi:audio-video"
 _attr_supported_features = (
 MediaPlayerEntityFeature.VOLUME_SET
 | MediaPlayerEntityFeature.VOLUME_MUTE
homeassistant/components/aosmith/icons.json (Normal file, 9 lines)

@@ -0,0 +1,9 @@
+{
+  "entity": {
+    "sensor": {
+      "hot_water_availability": {
+        "default": "mdi:water-thermometer"
+      }
+    }
+  }
+}
homeassistant/components/aosmith/sensor.py

@@ -33,7 +33,6 @@ STATUS_ENTITY_DESCRIPTIONS: tuple[AOSmithStatusSensorEntityDescription, ...] = (
 AOSmithStatusSensorEntityDescription(
 key="hot_water_availability",
 translation_key="hot_water_availability",
-icon="mdi:water-thermometer",
 device_class=SensorDeviceClass.ENUM,
 options=["low", "medium", "high"],
 value_fn=lambda device: HOT_WATER_STATUS_MAP.get(
homeassistant/components/apcupsd/binary_sensor.py

@@ -19,7 +19,7 @@ _LOGGER = logging.getLogger(__name__)
 _DESCRIPTION = BinarySensorEntityDescription(
 key="statflag",
 name="UPS Online Status",
-icon="mdi:heart",
+translation_key="online_status",
 )
 # The bit in STATFLAG that indicates the online status of the APC UPS.
 _VALUE_ONLINE_MASK: Final = 0b1000
homeassistant/components/apcupsd/config_flow.py

@@ -1,7 +1,6 @@
 """Config flow for APCUPSd integration."""
 from __future__ import annotations

-import asyncio
 from typing import Any

 import voluptuous as vol
@@ -54,7 +53,7 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
 coordinator = APCUPSdCoordinator(self.hass, host, port)
 await coordinator.async_request_refresh()

-if isinstance(coordinator.last_exception, (UpdateFailed, asyncio.TimeoutError)):
+if isinstance(coordinator.last_exception, (UpdateFailed, TimeoutError)):
 errors = {"base": "cannot_connect"}
 return self.async_show_form(
 step_id="user", data_schema=_SCHEMA, errors=errors
homeassistant/components/apcupsd/icons.json (Normal file, 155 lines)

@@ -0,0 +1,155 @@
+{
+  "entity": {
+    "binary_sensor": {
+      "online_status": {
+        "default": "mdi:heart"
+      }
+    },
+    "sensor": {
+      "alarm_delay": {
+        "default": "mdi:alarm"
+      },
+      "apc_status": {
+        "default": "mdi:information-outline"
+      },
+      "apc_model": {
+        "default": "mdi:information-outline"
+      },
+      "bad_batteries": {
+        "default": "mdi:information-outline"
+      },
+      "battery_replacement_date": {
+        "default": "mdi:calendar-clock"
+      },
+      "battery_status": {
+        "default": "mdi:information-outline"
+      },
+      "cable_type": {
+        "default": "mdi:ethernet-cable"
+      },
+      "total_time_on_battery": {
+        "default": "mdi:timer-outline"
+      },
+      "date": {
+        "default": "mdi:calendar-clock"
+      },
+      "dip_switch_settings": {
+        "default": "mdi:information-outline"
+      },
+      "low_battery_signal": {
+        "default": "mdi:clock-alert"
+      },
+      "driver": {
+        "default": "mdi:information-outline"
+      },
+      "shutdown_delay": {
+        "default": "mdi:timer-outline"
+      },
+      "wake_delay": {
+        "default": "mdi:timer-outline"
+      },
+      "date_and_time": {
+        "default": "mdi:calendar-clock"
+      },
+      "external_batteries": {
+        "default": "mdi:information-outline"
+      },
+      "firmware_version": {
+        "default": "mdi:information-outline"
+      },
+      "hostname": {
+        "default": "mdi:information-outline"
+      },
+      "last_self_test": {
+        "default": "mdi:calendar-clock"
+      },
+      "last_transfer": {
+        "default": "mdi:transfer"
+      },
+      "line_failure": {
+        "default": "mdi:information-outline"
+      },
+      "load_capacity": {
+        "default": "mdi:gauge"
+      },
+      "apparent_power": {
+        "default": "mdi:gauge"
+      },
+      "manufacture_date": {
+        "default": "mdi:calendar"
+      },
+      "master_update": {
+        "default": "mdi:information-outline"
+      },
+      "max_time": {
+        "default": "mdi:timer-off-outline"
+      },
+      "max_battery_charge": {
+        "default": "mdi:battery-alert"
+      },
+      "min_time": {
+        "default": "mdi:timer-outline"
+      },
+      "model": {
+        "default": "mdi:information-outline"
+      },
+      "transfer_count": {
+        "default": "mdi:counter"
+      },
+      "register_1_fault": {
+        "default": "mdi:information-outline"
+      },
+      "register_2_fault": {
+        "default": "mdi:information-outline"
+      },
+      "register_3_fault": {
+        "default": "mdi:information-outline"
+      },
+      "restore_capacity": {
+        "default": "mdi:battery-alert"
+      },
+      "self_test_result": {
+        "default": "mdi:information-outline"
+      },
+      "sensitivity": {
+        "default": "mdi:information-outline"
+      },
+      "serial_number": {
+        "default": "mdi:information-outline"
+      },
+      "startup_time": {
+        "default": "mdi:calendar-clock"
+      },
+      "online_status": {
+        "default": "mdi:information-outline"
+      },
+      "status": {
+        "default": "mdi:information-outline"
+      },
+      "self_test_interval": {
+        "default": "mdi:information-outline"
+      },
+      "time_left": {
+        "default": "mdi:clock-alert"
+      },
+      "time_on_battery": {
+        "default": "mdi:timer-outline"
+      },
+      "ups_mode": {
+        "default": "mdi:information-outline"
+      },
+      "ups_name": {
+        "default": "mdi:information-outline"
+      },
+      "version": {
+        "default": "mdi:information-outline"
+      },
+      "transfer_from_battery": {
+        "default": "mdi:transfer"
+      },
+      "transfer_to_battery": {
+        "default": "mdi:transfer"
+      }
+    }
+  }
+}
homeassistant/components/apcupsd/sensor.py

@@ -31,43 +31,42 @@ _LOGGER = logging.getLogger(__name__)
 SENSORS: dict[str, SensorEntityDescription] = {
 "alarmdel": SensorEntityDescription(
 key="alarmdel",
+translation_key="alarm_delay",
 name="UPS Alarm Delay",
-icon="mdi:alarm",
 ),
 "ambtemp": SensorEntityDescription(
 key="ambtemp",
 name="UPS Ambient Temperature",
-icon="mdi:thermometer",
 native_unit_of_measurement=UnitOfTemperature.CELSIUS,
 device_class=SensorDeviceClass.TEMPERATURE,
 state_class=SensorStateClass.MEASUREMENT,
 ),
 "apc": SensorEntityDescription(
 key="apc",
+translation_key="apc_status",
 name="UPS Status Data",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "apcmodel": SensorEntityDescription(
 key="apcmodel",
+translation_key="apc_model",
 name="UPS Model",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "badbatts": SensorEntityDescription(
 key="badbatts",
+translation_key="bad_batteries",
 name="UPS Bad Batteries",
-icon="mdi:information-outline",
 ),
 "battdate": SensorEntityDescription(
 key="battdate",
+translation_key="battery_replacement_date",
 name="UPS Battery Replaced",
-icon="mdi:calendar-clock",
 ),
 "battstat": SensorEntityDescription(
 key="battstat",
+translation_key="battery_status",
 name="UPS Battery Status",
-icon="mdi:information-outline",
 ),
 "battv": SensorEntityDescription(
 key="battv",
@@ -80,69 +79,68 @@ SENSORS: dict[str, SensorEntityDescription] = {
 key="bcharge",
 name="UPS Battery",
 native_unit_of_measurement=PERCENTAGE,
-icon="mdi:battery",
 device_class=SensorDeviceClass.BATTERY,
 state_class=SensorStateClass.MEASUREMENT,
 ),
 "cable": SensorEntityDescription(
 key="cable",
+translation_key="cable_type",
 name="UPS Cable Type",
-icon="mdi:ethernet-cable",
 entity_registry_enabled_default=False,
 ),
 "cumonbatt": SensorEntityDescription(
 key="cumonbatt",
+translation_key="total_time_on_battery",
 name="UPS Total Time on Battery",
-icon="mdi:timer-outline",
 state_class=SensorStateClass.TOTAL_INCREASING,
 ),
 "date": SensorEntityDescription(
 key="date",
+translation_key="date",
 name="UPS Status Date",
-icon="mdi:calendar-clock",
 entity_registry_enabled_default=False,
 ),
 "dipsw": SensorEntityDescription(
 key="dipsw",
+translation_key="dip_switch_settings",
 name="UPS Dip Switch Settings",
-icon="mdi:information-outline",
 ),
 "dlowbatt": SensorEntityDescription(
 key="dlowbatt",
+translation_key="low_battery_signal",
 name="UPS Low Battery Signal",
-icon="mdi:clock-alert",
 ),
 "driver": SensorEntityDescription(
 key="driver",
+translation_key="driver",
 name="UPS Driver",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "dshutd": SensorEntityDescription(
 key="dshutd",
+translation_key="shutdown_delay",
 name="UPS Shutdown Delay",
-icon="mdi:timer-outline",
 ),
 "dwake": SensorEntityDescription(
 key="dwake",
+translation_key="wake_delay",
 name="UPS Wake Delay",
-icon="mdi:timer-outline",
 ),
 "end apc": SensorEntityDescription(
 key="end apc",
+translation_key="date_and_time",
 name="UPS Date and Time",
-icon="mdi:calendar-clock",
 entity_registry_enabled_default=False,
 ),
 "extbatts": SensorEntityDescription(
 key="extbatts",
+translation_key="external_batteries",
 name="UPS External Batteries",
-icon="mdi:information-outline",
 ),
 "firmware": SensorEntityDescription(
 key="firmware",
+translation_key="firmware_version",
 name="UPS Firmware Version",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "hitrans": SensorEntityDescription(
@@ -153,8 +151,8 @@ SENSORS: dict[str, SensorEntityDescription] = {
 ),
 "hostname": SensorEntityDescription(
 key="hostname",
+translation_key="hostname",
 name="UPS Hostname",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "humidity": SensorEntityDescription(
@@ -162,7 +160,6 @@ SENSORS: dict[str, SensorEntityDescription] = {
 name="UPS Ambient Humidity",
 native_unit_of_measurement=PERCENTAGE,
 device_class=SensorDeviceClass.HUMIDITY,
-icon="mdi:water-percent",
 state_class=SensorStateClass.MEASUREMENT,
 ),
 "itemp": SensorEntityDescription(
@@ -174,19 +171,19 @@ SENSORS: dict[str, SensorEntityDescription] = {
 ),
 "laststest": SensorEntityDescription(
 key="laststest",
+translation_key="last_self_test",
 name="UPS Last Self Test",
-icon="mdi:calendar-clock",
 ),
 "lastxfer": SensorEntityDescription(
 key="lastxfer",
+translation_key="last_transfer",
 name="UPS Last Transfer",
-icon="mdi:transfer",
 entity_registry_enabled_default=False,
 ),
 "linefail": SensorEntityDescription(
 key="linefail",
+translation_key="line_failure",
 name="UPS Input Voltage Status",
-icon="mdi:information-outline",
 ),
 "linefreq": SensorEntityDescription(
 key="linefreq",
@@ -204,16 +201,16 @@ SENSORS: dict[str, SensorEntityDescription] = {
 ),
 "loadpct": SensorEntityDescription(
 key="loadpct",
+translation_key="load_capacity",
 name="UPS Load",
 native_unit_of_measurement=PERCENTAGE,
-icon="mdi:gauge",
 state_class=SensorStateClass.MEASUREMENT,
 ),
 "loadapnt": SensorEntityDescription(
 key="loadapnt",
+translation_key="apparent_power",
 name="UPS Load Apparent Power",
 native_unit_of_measurement=PERCENTAGE,
-icon="mdi:gauge",
 ),
 "lotrans": SensorEntityDescription(
 key="lotrans",
@@ -223,14 +220,14 @@ SENSORS: dict[str, SensorEntityDescription] = {
 ),
 "mandate": SensorEntityDescription(
 key="mandate",
+translation_key="manufacture_date",
 name="UPS Manufacture Date",
-icon="mdi:calendar",
 entity_registry_enabled_default=False,
 ),
 "masterupd": SensorEntityDescription(
 key="masterupd",
+translation_key="master_update",
 name="UPS Master Update",
-icon="mdi:information-outline",
 ),
 "maxlinev": SensorEntityDescription(
 key="maxlinev",
@@ -240,14 +237,14 @@ SENSORS: dict[str, SensorEntityDescription] = {
 ),
 "maxtime": SensorEntityDescription(
 key="maxtime",
+translation_key="max_time",
 name="UPS Battery Timeout",
-icon="mdi:timer-off-outline",
 ),
 "mbattchg": SensorEntityDescription(
 key="mbattchg",
+translation_key="max_battery_charge",
 name="UPS Battery Shutdown",
 native_unit_of_measurement=PERCENTAGE,
-icon="mdi:battery-alert",
 ),
 "minlinev": SensorEntityDescription(
 key="minlinev",
@@ -257,13 +254,13 @@ SENSORS: dict[str, SensorEntityDescription] = {
 ),
 "mintimel": SensorEntityDescription(
 key="mintimel",
+translation_key="min_time",
 name="UPS Shutdown Time",
-icon="mdi:timer-outline",
 ),
 "model": SensorEntityDescription(
 key="model",
+translation_key="model",
 name="UPS Model",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "nombattv": SensorEntityDescription(
@@ -298,8 +295,8 @@ SENSORS: dict[str, SensorEntityDescription] = {
 ),
 "numxfers": SensorEntityDescription(
 key="numxfers",
+translation_key="transfer_count",
 name="UPS Transfer Count",
-icon="mdi:counter",
 state_class=SensorStateClass.TOTAL_INCREASING,
 ),
 "outcurnt": SensorEntityDescription(
@@ -318,109 +315,109 @@ SENSORS: dict[str, SensorEntityDescription] = {
 ),
 "reg1": SensorEntityDescription(
 key="reg1",
+translation_key="register_1_fault",
 name="UPS Register 1 Fault",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "reg2": SensorEntityDescription(
 key="reg2",
+translation_key="register_2_fault",
 name="UPS Register 2 Fault",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "reg3": SensorEntityDescription(
 key="reg3",
+translation_key="register_3_fault",
 name="UPS Register 3 Fault",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "retpct": SensorEntityDescription(
 key="retpct",
+translation_key="restore_capacity",
 name="UPS Restore Requirement",
 native_unit_of_measurement=PERCENTAGE,
-icon="mdi:battery-alert",
 ),
 "selftest": SensorEntityDescription(
 key="selftest",
+translation_key="self_test_result",
 name="UPS Self Test result",
-icon="mdi:information-outline",
 ),
 "sense": SensorEntityDescription(
 key="sense",
+translation_key="sensitivity",
 name="UPS Sensitivity",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "serialno": SensorEntityDescription(
 key="serialno",
+translation_key="serial_number",
 name="UPS Serial Number",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "starttime": SensorEntityDescription(
 key="starttime",
+translation_key="startup_time",
 name="UPS Startup Time",
-icon="mdi:calendar-clock",
 ),
 "statflag": SensorEntityDescription(
 key="statflag",
+translation_key="online_status",
 name="UPS Status Flag",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "status": SensorEntityDescription(
 key="status",
+translation_key="status",
 name="UPS Status",
-icon="mdi:information-outline",
 ),
 "stesti": SensorEntityDescription(
 key="stesti",
+translation_key="self_test_interval",
 name="UPS Self Test Interval",
-icon="mdi:information-outline",
 ),
 "timeleft": SensorEntityDescription(
 key="timeleft",
+translation_key="time_left",
 name="UPS Time Left",
-icon="mdi:clock-alert",
 state_class=SensorStateClass.MEASUREMENT,
 ),
 "tonbatt": SensorEntityDescription(
 key="tonbatt",
+translation_key="time_on_battery",
 name="UPS Time on Battery",
-icon="mdi:timer-outline",
 state_class=SensorStateClass.TOTAL_INCREASING,
 ),
 "upsmode": SensorEntityDescription(
 key="upsmode",
+translation_key="ups_mode",
 name="UPS Mode",
-icon="mdi:information-outline",
 ),
 "upsname": SensorEntityDescription(
 key="upsname",
+translation_key="ups_name",
 name="UPS Name",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "version": SensorEntityDescription(
 key="version",
+translation_key="version",
 name="UPS Daemon Info",
-icon="mdi:information-outline",
 entity_registry_enabled_default=False,
 ),
 "xoffbat": SensorEntityDescription(
 key="xoffbat",
+translation_key="transfer_from_battery",
 name="UPS Transfer from Battery",
||||||
icon="mdi:transfer",
|
|
||||||
),
|
),
|
||||||
"xoffbatt": SensorEntityDescription(
|
"xoffbatt": SensorEntityDescription(
|
||||||
key="xoffbatt",
|
key="xoffbatt",
|
||||||
|
translation_key="transfer_from_battery",
|
||||||
name="UPS Transfer from Battery",
|
name="UPS Transfer from Battery",
|
||||||
icon="mdi:transfer",
|
|
||||||
),
|
),
|
||||||
"xonbatt": SensorEntityDescription(
|
"xonbatt": SensorEntityDescription(
|
||||||
key="xonbatt",
|
key="xonbatt",
|
||||||
|
translation_key="transfer_to_battery",
|
||||||
name="UPS Transfer to Battery",
|
name="UPS Transfer to Battery",
|
||||||
icon="mdi:transfer",
|
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
|
|
||||||
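As an illustration of the pattern applied throughout the hunk above (not taken verbatim from this commit): each description gains a translation_key and drops its hard-coded icon=, presumably so the icon can be resolved from the integration's icon translations keyed by translation_key. A minimal sketch of one description in the post-change style; the constant name is illustrative:

    from homeassistant.components.sensor import SensorEntityDescription, SensorStateClass

    # Illustrative description in the new style: no icon= here, the icon is
    # looked up via translation_key instead of being hard-coded per entity.
    EXAMPLE_DESCRIPTION = SensorEntityDescription(
        key="numxfers",
        translation_key="transfer_count",
        name="UPS Transfer Count",
        state_class=SensorStateClass.TOTAL_INCREASING,
    )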
@@ -12,7 +12,6 @@ import voluptuous as vol

 from homeassistant.auth.models import User
 from homeassistant.auth.permissions.const import POLICY_READ
-from homeassistant.bootstrap import DATA_LOGGING
 from homeassistant.components.http import (
     KEY_HASS,
     KEY_HASS_USER,
@@ -23,6 +22,7 @@ from homeassistant.const import (
     CONTENT_TYPE_JSON,
     EVENT_HOMEASSISTANT_STOP,
     EVENT_STATE_CHANGED,
+    KEY_DATA_LOGGING as DATA_LOGGING,
     MATCH_ALL,
     URL_API,
     URL_API_COMPONENTS,
@@ -175,7 +175,7 @@ class APIEventStream(HomeAssistantView):
                     msg = f"data: {payload}\n\n"
                     _LOGGER.debug("STREAM %s WRITING %s", id(stop_obj), msg.strip())
                     await response.write(msg.encode("UTF-8"))
-                except asyncio.TimeoutError:
+                except TimeoutError:
                     await to_write.put(STREAM_PING_PAYLOAD)

        except asyncio.CancelledError:
@@ -222,7 +222,7 @@ class APIStatesView(HomeAssistantView):
            if entity_perm(state.entity_id, "read")
        )
        response = web.Response(
-            body=b"[" + b",".join(states) + b"]",
+            body=b"".join((b"[", b",".join(states), b"]")),
            content_type=CONTENT_TYPE_JSON,
            zlib_executor_size=32768,
        )
@@ -472,7 +472,9 @@ class APIErrorLog(HomeAssistantView):
     async def get(self, request: web.Request) -> web.FileResponse:
         """Retrieve API error log."""
         hass: HomeAssistant = request.app[KEY_HASS]
-        return web.FileResponse(hass.data[DATA_LOGGING])
+        response = web.FileResponse(hass.data[DATA_LOGGING])
+        response.enable_compression()
+        return response


 async def async_services_json(hass: HomeAssistant) -> list[dict[str, Any]]:
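The last hunk above changes APIErrorLog.get so the FileResponse is built first and compression is enabled before it is returned. A minimal aiohttp-only sketch of the same pattern; the handler name and path argument are illustrative:

    from aiohttp import web

    async def serve_error_log(log_path: str) -> web.FileResponse:
        """Return a file with compression negotiation enabled on the response."""
        response = web.FileResponse(log_path)
        response.enable_compression()  # same call the diff adds above
        return response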
@@ -1,8 +1,10 @@
 """The Apple TV integration."""
+from __future__ import annotations
+
 import asyncio
 import logging
 from random import randrange
-from typing import TYPE_CHECKING, cast
+from typing import Any, cast

 from pyatv import connect, exceptions, scan
 from pyatv.conf import AppleTV
@@ -25,8 +27,8 @@ from homeassistant.const import (
     EVENT_HOMEASSISTANT_STOP,
     Platform,
 )
-from homeassistant.core import HomeAssistant, callback
-from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.core import Event, HomeAssistant, callback
+from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
 from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.device_registry import DeviceInfo
@@ -40,7 +42,8 @@ from .const import CONF_CREDENTIALS, CONF_IDENTIFIERS, CONF_START_OFF, DOMAIN

 _LOGGER = logging.getLogger(__name__)

-DEFAULT_NAME = "Apple TV"
+DEFAULT_NAME_TV = "Apple TV"
+DEFAULT_NAME_HP = "HomePod"

 BACKOFF_TIME_LOWER_LIMIT = 15  # seconds
 BACKOFF_TIME_UPPER_LIMIT = 300  # Five minutes
@@ -56,14 +59,39 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     manager = AppleTVManager(hass, entry)

     if manager.is_on:
-        await manager.connect_once(raise_missing_credentials=True)
-        if not manager.atv:
-            address = entry.data[CONF_ADDRESS]
-            raise ConfigEntryNotReady(f"Not found at {address}, waiting for discovery")
+        address = entry.data[CONF_ADDRESS]
+        try:
+            await manager.async_first_connect()
+        except (
+            exceptions.AuthenticationError,
+            exceptions.InvalidCredentialsError,
+            exceptions.NoCredentialsError,
+        ) as ex:
+            raise ConfigEntryAuthFailed(
+                f"{address}: Authentication failed, try reconfiguring device: {ex}"
+            ) from ex
+        except (
+            asyncio.CancelledError,
+            exceptions.ConnectionLostError,
+            exceptions.ConnectionFailedError,
+        ) as ex:
+            raise ConfigEntryNotReady(f"{address}: {ex}") from ex
+        except (
+            exceptions.ProtocolError,
+            exceptions.NoServiceError,
+            exceptions.PairingError,
+            exceptions.BackOffError,
+            exceptions.DeviceIdMissingError,
+        ) as ex:
+            _LOGGER.debug(
+                "Error setting up apple_tv at %s: %s", address, ex, exc_info=ex
+            )
+            raise ConfigEntryNotReady(f"{address}: {ex}") from ex

     hass.data.setdefault(DOMAIN, {})[entry.unique_id] = manager

-    async def on_hass_stop(event):
+    async def on_hass_stop(event: Event) -> None:
         """Stop push updates when hass stops."""
         await manager.disconnect()

@@ -94,33 +122,29 @@ class AppleTVEntity(Entity):
     _attr_should_poll = False
     _attr_has_entity_name = True
     _attr_name = None
+    atv: AppleTVInterface | None = None

-    def __init__(
-        self, name: str, identifier: str | None, manager: "AppleTVManager"
-    ) -> None:
+    def __init__(self, name: str, identifier: str, manager: AppleTVManager) -> None:
         """Initialize device."""
-        self.atv: AppleTVInterface = None  # type: ignore[assignment]
         self.manager = manager
-        if TYPE_CHECKING:
-            assert identifier is not None
         self._attr_unique_id = identifier
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, identifier)},
             name=name,
         )

-    async def async_added_to_hass(self):
+    async def async_added_to_hass(self) -> None:
         """Handle when an entity is about to be added to Home Assistant."""

         @callback
-        def _async_connected(atv):
+        def _async_connected(atv: AppleTVInterface) -> None:
             """Handle that a connection was made to a device."""
             self.atv = atv
             self.async_device_connected(atv)
             self.async_write_ha_state()

         @callback
-        def _async_disconnected():
+        def _async_disconnected() -> None:
             """Handle that a connection to a device was lost."""
             self.async_device_disconnected()
             self.atv = None
@@ -143,10 +167,10 @@ class AppleTVEntity(Entity):
             )
         )

-    def async_device_connected(self, atv):
+    def async_device_connected(self, atv: AppleTVInterface) -> None:
         """Handle when connection is made to device."""

-    def async_device_disconnected(self):
+    def async_device_disconnected(self) -> None:
         """Handle when connection was lost to device."""


@@ -158,22 +182,23 @@ class AppleTVManager(DeviceListener):
     in case of problems.
     """

+    atv: AppleTVInterface | None = None
+    _connection_attempts = 0
+    _connection_was_lost = False
+    _task: asyncio.Task[None] | None = None

     def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None:
         """Initialize power manager."""
         self.config_entry = config_entry
         self.hass = hass
-        self.atv: AppleTVInterface | None = None
         self.is_on = not config_entry.options.get(CONF_START_OFF, False)
-        self._connection_attempts = 0
-        self._connection_was_lost = False
-        self._task = None

-    async def init(self):
+    async def init(self) -> None:
         """Initialize power management."""
         if self.is_on:
             await self.connect()

-    def connection_lost(self, _):
+    def connection_lost(self, exception: Exception) -> None:
         """Device was unexpectedly disconnected.

         This is a callback function from pyatv.interface.DeviceListener.
@@ -184,14 +209,14 @@ class AppleTVManager(DeviceListener):
         self._connection_was_lost = True
         self._handle_disconnect()

-    def connection_closed(self):
+    def connection_closed(self) -> None:
         """Device connection was (intentionally) closed.

         This is a callback function from pyatv.interface.DeviceListener.
         """
         self._handle_disconnect()

-    def _handle_disconnect(self):
+    def _handle_disconnect(self) -> None:
         """Handle that the device disconnected and restart connect loop."""
         if self.atv:
             self.atv.close()
@@ -199,12 +224,12 @@ class AppleTVManager(DeviceListener):
         self._dispatch_send(SIGNAL_DISCONNECTED)
         self._start_connect_loop()

-    async def connect(self):
+    async def connect(self) -> None:
         """Connect to device."""
         self.is_on = True
         self._start_connect_loop()

-    async def disconnect(self):
+    async def disconnect(self) -> None:
         """Disconnect from device."""
         _LOGGER.debug("Disconnecting from device")
         self.is_on = False
@@ -218,7 +243,7 @@ class AppleTVManager(DeviceListener):
         except Exception:  # pylint: disable=broad-except
             _LOGGER.exception("An error occurred while disconnecting")

-    def _start_connect_loop(self):
+    def _start_connect_loop(self) -> None:
         """Start background connect loop to device."""
         if not self._task and self.atv is None and self.is_on:
             self._task = asyncio.create_task(self._connect_loop())
@@ -227,11 +252,25 @@ class AppleTVManager(DeviceListener):
                 "Not starting connect loop (%s, %s)", self.atv is None, self.is_on
             )

+    async def _connect_once(self, raise_missing_credentials: bool) -> None:
+        """Connect to device once."""
+        if conf := await self._scan():
+            await self._connect(conf, raise_missing_credentials)
+
+    async def async_first_connect(self) -> None:
+        """Connect to device for the first time."""
+        connect_ok = False
+        try:
+            await self._connect_once(raise_missing_credentials=True)
+            connect_ok = True
+        finally:
+            if not connect_ok:
+                await self.disconnect()

     async def connect_once(self, raise_missing_credentials: bool) -> None:
         """Try to connect once."""
         try:
-            if conf := await self._scan():
-                await self._connect(conf, raise_missing_credentials)
+            await self._connect_once(raise_missing_credentials)
         except exceptions.AuthenticationError:
             self.config_entry.async_start_reauth(self.hass)
             await self.disconnect()
@@ -244,9 +283,9 @@ class AppleTVManager(DeviceListener):
             pass
         except Exception:  # pylint: disable=broad-except
             _LOGGER.exception("Failed to connect")
-            self.atv = None
+            await self.disconnect()

-    async def _connect_loop(self):
+    async def _connect_loop(self) -> None:
         """Connect loop background task function."""
         _LOGGER.debug("Starting connect loop")

@@ -255,7 +294,8 @@ class AppleTVManager(DeviceListener):
         while self.is_on and self.atv is None:
             await self.connect_once(raise_missing_credentials=False)
             if self.atv is not None:
-                break
+                # Calling self.connect_once may have set self.atv
+                break  # type: ignore[unreachable]
             self._connection_attempts += 1
             backoff = min(
                 max(
@@ -352,13 +392,17 @@ class AppleTVManager(DeviceListener):
             self._connection_was_lost = False

     @callback
-    def _async_setup_device_registry(self):
+    def _async_setup_device_registry(self) -> None:
         attrs = {
             ATTR_IDENTIFIERS: {(DOMAIN, self.config_entry.unique_id)},
             ATTR_MANUFACTURER: "Apple",
             ATTR_NAME: self.config_entry.data[CONF_NAME],
         }
-        attrs[ATTR_SUGGESTED_AREA] = attrs[ATTR_NAME].removesuffix(f" {DEFAULT_NAME}")
+        attrs[ATTR_SUGGESTED_AREA] = (
+            attrs[ATTR_NAME]
+            .removesuffix(f" {DEFAULT_NAME_TV}")
+            .removesuffix(f" {DEFAULT_NAME_HP}")
+        )

         if self.atv:
             dev_info = self.atv.device_info
@@ -379,18 +423,18 @@ class AppleTVManager(DeviceListener):
         )

     @property
-    def is_connecting(self):
+    def is_connecting(self) -> bool:
         """Return true if connection is in progress."""
         return self._task is not None

-    def _address_updated(self, address):
+    def _address_updated(self, address: str) -> None:
         """Update cached address in config entry."""
         _LOGGER.debug("Changing address to %s", address)
         self.hass.config_entries.async_update_entry(
             self.config_entry, data={**self.config_entry.data, CONF_ADDRESS: address}
         )

-    def _dispatch_send(self, signal, *args):
+    def _dispatch_send(self, signal: str, *args: Any) -> None:
         """Dispatch a signal to all entities managed by this manager."""
         async_dispatcher_send(
             self.hass, f"{signal}_{self.config_entry.unique_id}", *args
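The rewritten setup above converts pyatv failures into the two config-entry exceptions Home Assistant acts on: credential problems become ConfigEntryAuthFailed (which starts a reauth flow) and transient connection problems become ConfigEntryNotReady (which schedules a setup retry). A condensed sketch of that mapping, reusing only names that appear in the diff; the wrapper function itself is illustrative:

    from pyatv import exceptions

    from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady


    async def first_connect_or_raise(manager, address: str) -> None:
        """Illustrative wrapper: map library errors to config-entry exceptions."""
        try:
            await manager.async_first_connect()
        except (exceptions.AuthenticationError, exceptions.NoCredentialsError) as ex:
            # Bad or missing credentials: ask the user to reauthenticate.
            raise ConfigEntryAuthFailed(f"{address}: {ex}") from ex
        except (exceptions.ConnectionFailedError, exceptions.ConnectionLostError) as ex:
            # Device unreachable right now: Home Assistant will retry setup later.
            raise ConfigEntryNotReady(f"{address}: {ex}") from ex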
@@ -3,7 +3,7 @@ from __future__ import annotations

 import asyncio
 from collections import deque
-from collections.abc import Mapping
+from collections.abc import Awaitable, Callable, Mapping
 from ipaddress import ip_address
 import logging
 from random import randrange
@@ -13,12 +13,13 @@ from pyatv import exceptions, pair, scan
 from pyatv.const import DeviceModel, PairingRequirement, Protocol
 from pyatv.convert import model_str, protocol_str
 from pyatv.helpers import get_unique_id
+from pyatv.interface import BaseConfig, PairingHandler
 import voluptuous as vol

 from homeassistant import config_entries
 from homeassistant.components import zeroconf
 from homeassistant.const import CONF_ADDRESS, CONF_NAME, CONF_PIN
-from homeassistant.core import callback
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.data_entry_flow import AbortFlow, FlowResult
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -49,10 +50,12 @@ OPTIONS_FLOW = {
 }


-async def device_scan(hass, identifier, loop):
+async def device_scan(
+    hass: HomeAssistant, identifier: str | None, loop: asyncio.AbstractEventLoop
+) -> tuple[BaseConfig | None, list[str] | None]:
     """Scan for a specific device using identifier as filter."""

-    def _filter_device(dev):
+    def _filter_device(dev: BaseConfig) -> bool:
         if identifier is None:
             return True
         if identifier == str(dev.address):
@@ -61,9 +64,12 @@ async def device_scan(hass, identifier, loop):
             return True
         return any(service.identifier == identifier for service in dev.services)

-    def _host_filter():
+    def _host_filter() -> list[str] | None:
+        if identifier is None:
+            return None
         try:
-            return [ip_address(identifier)]
+            ip_address(identifier)
+            return [identifier]
         except ValueError:
             return None

@@ -84,6 +90,13 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

     VERSION = 1

+    scan_filter: str | None = None
+    atv: BaseConfig | None = None
+    atv_identifiers: list[str] | None = None
+    protocol: Protocol | None = None
+    pairing: PairingHandler | None = None
+    protocols_to_pair: deque[Protocol] | None = None
+
     @staticmethod
     @callback
     def async_get_options_flow(
@@ -92,18 +105,12 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         """Get options flow for this handler."""
         return SchemaOptionsFlowHandler(config_entry, OPTIONS_FLOW)

-    def __init__(self):
+    def __init__(self) -> None:
         """Initialize a new AppleTVConfigFlow."""
-        self.scan_filter = None
-        self.atv = None
-        self.atv_identifiers = None
-        self.protocol = None
-        self.pairing = None
-        self.credentials = {}  # Protocol -> credentials
-        self.protocols_to_pair = deque()
+        self.credentials: dict[int, str | None] = {}  # Protocol -> credentials

     @property
-    def device_identifier(self):
+    def device_identifier(self) -> str | None:
         """Return a identifier for the config entry.

         A device has multiple unique identifiers, but Home Assistant only supports one
@@ -118,6 +125,7 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         existing config entry. If that's the case, the unique_id from that entry is
         re-used, otherwise the newly discovered identifier is used instead.
         """
+        assert self.atv
         all_identifiers = set(self.atv.all_identifiers)
         if unique_id := self._entry_unique_id_from_identifers(all_identifiers):
             return unique_id
@@ -143,7 +151,9 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         self.context["identifier"] = self.unique_id
         return await self.async_step_reconfigure()

-    async def async_step_reconfigure(self, user_input=None):
+    async def async_step_reconfigure(
+        self, user_input: dict[str, str] | None = None
+    ) -> FlowResult:
         """Inform user that reconfiguration is about to start."""
         if user_input is not None:
             return await self.async_find_device_wrapper(
@@ -152,7 +162,9 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

         return self.async_show_form(step_id="reconfigure")

-    async def async_step_user(self, user_input=None):
+    async def async_step_user(
+        self, user_input: dict[str, str] | None = None
+    ) -> FlowResult:
         """Handle the initial step."""
         errors = {}
         if user_input is not None:
@@ -170,6 +182,7 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
                 await self.async_set_unique_id(
                     self.device_identifier, raise_on_progress=False
                 )
+                assert self.atv
                 self.context["all_identifiers"] = self.atv.all_identifiers
                 return await self.async_step_confirm()

@@ -275,8 +288,11 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
                 context["all_identifiers"].append(unique_id)
             raise AbortFlow("already_in_progress")

-    async def async_found_zeroconf_device(self, user_input=None):
+    async def async_found_zeroconf_device(
+        self, user_input: dict[str, str] | None = None
+    ) -> FlowResult:
         """Handle device found after Zeroconf discovery."""
+        assert self.atv
         self.context["all_identifiers"] = self.atv.all_identifiers
         # Also abort if an integration with this identifier already exists
         await self.async_set_unique_id(self.device_identifier)
@@ -288,7 +304,11 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         self.context["identifier"] = self.unique_id
         return await self.async_step_confirm()

-    async def async_find_device_wrapper(self, next_func, allow_exist=False):
+    async def async_find_device_wrapper(
+        self,
+        next_func: Callable[[], Awaitable[FlowResult]],
+        allow_exist: bool = False,
+    ) -> FlowResult:
         """Find a specific device and call another function when done.

         This function will do error handling and bail out when an error
@@ -306,7 +326,7 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

         return await next_func()

-    async def async_find_device(self, allow_exist=False):
+    async def async_find_device(self, allow_exist: bool = False) -> None:
         """Scan for the selected device to discover services."""
         self.atv, self.atv_identifiers = await device_scan(
             self.hass, self.scan_filter, self.hass.loop
@@ -357,8 +377,11 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             if not allow_exist:
                 raise DeviceAlreadyConfigured()

-    async def async_step_confirm(self, user_input=None):
+    async def async_step_confirm(
+        self, user_input: dict[str, str] | None = None
+    ) -> FlowResult:
         """Handle user-confirmation of discovered node."""
+        assert self.atv
         if user_input is not None:
             expected_identifier_count = len(self.context["all_identifiers"])
             # If number of services found during device scan mismatch number of
@@ -384,7 +407,7 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             },
         )

-    async def async_pair_next_protocol(self):
+    async def async_pair_next_protocol(self) -> FlowResult:
         """Start pairing process for the next available protocol."""
         await self._async_cleanup()

@@ -393,8 +416,16 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             return await self._async_get_entry()

         self.protocol = self.protocols_to_pair.popleft()
+        assert self.atv
         service = self.atv.get_service(self.protocol)

+        if service is None:
+            _LOGGER.debug(
+                "%s does not support pairing (cannot find a corresponding service)",
+                self.protocol,
+            )
+            return await self.async_pair_next_protocol()
+
         # Service requires a password
         if service.requires_password:
             return await self.async_step_password()
@@ -413,7 +444,7 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         _LOGGER.debug("%s requires pairing", self.protocol)

         # Protocol specific arguments
-        pair_args = {}
+        pair_args: dict[str, Any] = {}
         if self.protocol in {Protocol.AirPlay, Protocol.Companion, Protocol.DMAP}:
             pair_args["name"] = "Home Assistant"
         if self.protocol == Protocol.DMAP:
@@ -448,8 +479,11 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):

         return await self.async_step_pair_no_pin()

-    async def async_step_protocol_disabled(self, user_input=None):
+    async def async_step_protocol_disabled(
+        self, user_input: dict[str, str] | None = None
+    ) -> FlowResult:
         """Inform user that a protocol is disabled and cannot be paired."""
+        assert self.protocol
         if user_input is not None:
             return await self.async_pair_next_protocol()
         return self.async_show_form(
@@ -457,9 +491,13 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             description_placeholders={"protocol": protocol_str(self.protocol)},
         )

-    async def async_step_pair_with_pin(self, user_input=None):
+    async def async_step_pair_with_pin(
+        self, user_input: dict[str, str] | None = None
+    ) -> FlowResult:
         """Handle pairing step where a PIN is required from the user."""
         errors = {}
+        assert self.pairing
+        assert self.protocol
         if user_input is not None:
             try:
                 self.pairing.pin(user_input[CONF_PIN])
@@ -480,8 +518,12 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             description_placeholders={"protocol": protocol_str(self.protocol)},
         )

-    async def async_step_pair_no_pin(self, user_input=None):
+    async def async_step_pair_no_pin(
+        self, user_input: dict[str, str] | None = None
+    ) -> FlowResult:
         """Handle step where user has to enter a PIN on the device."""
+        assert self.pairing
+        assert self.protocol
         if user_input is not None:
             await self.pairing.finish()
             if self.pairing.has_paired:
@@ -497,12 +539,15 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             step_id="pair_no_pin",
             description_placeholders={
                 "protocol": protocol_str(self.protocol),
-                "pin": pin,
+                "pin": str(pin),
             },
         )

-    async def async_step_service_problem(self, user_input=None):
+    async def async_step_service_problem(
+        self, user_input: dict[str, str] | None = None
+    ) -> FlowResult:
         """Inform user that a service will not be added."""
+        assert self.protocol
         if user_input is not None:
             return await self.async_pair_next_protocol()

@@ -511,8 +556,11 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             description_placeholders={"protocol": protocol_str(self.protocol)},
         )

-    async def async_step_password(self, user_input=None):
+    async def async_step_password(
+        self, user_input: dict[str, str] | None = None
+    ) -> FlowResult:
         """Inform user that password is not supported."""
+        assert self.protocol
         if user_input is not None:
             return await self.async_pair_next_protocol()

@@ -521,18 +569,20 @@ class AppleTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             description_placeholders={"protocol": protocol_str(self.protocol)},
         )

-    async def _async_cleanup(self):
+    async def _async_cleanup(self) -> None:
         """Clean up allocated resources."""
         if self.pairing is not None:
             await self.pairing.close()
             self.pairing = None

-    async def _async_get_entry(self):
+    async def _async_get_entry(self) -> FlowResult:
         """Return config entry or update existing config entry."""
         # Abort if no protocols were paired
         if not self.credentials:
             return self.async_abort(reason="setup_failed")

+        assert self.atv
+
         data = {
             CONF_NAME: self.atv.name,
             CONF_CREDENTIALS: self.credentials,
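The _host_filter change above validates the identifier as an IP address but hands pyatv the original string instead of an ip_address object. Restated as a standalone helper (stdlib only; the free-standing signature is illustrative, the logic matches the hunk):

    from ipaddress import ip_address


    def host_filter(identifier: str | None) -> list[str] | None:
        """Return a one-element host filter if the identifier is an IP string."""
        if identifier is None:
            return None
        try:
            ip_address(identifier)  # validation only; scanning wants the str
        except ValueError:
            return None
        return [identifier]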
@@ -5,6 +5,7 @@
   "config_flow": true,
   "dependencies": ["zeroconf"],
   "documentation": "https://www.home-assistant.io/integrations/apple_tv",
+  "import_executor": true,
   "iot_class": "local_push",
   "loggers": ["pyatv", "srptools"],
   "requirements": ["pyatv==0.14.3"],
@@ -16,7 +16,15 @@ from pyatv.const import (
     ShuffleState,
 )
 from pyatv.helpers import is_streamable
-from pyatv.interface import AppleTV, Playing
+from pyatv.interface import (
+    AppleTV,
+    AudioListener,
+    OutputDevice,
+    Playing,
+    PowerListener,
+    PushListener,
+    PushUpdater,
+)

 from homeassistant.components import media_source
 from homeassistant.components.media_player import (
@@ -101,7 +109,9 @@ async def async_setup_entry(
     async_add_entities([AppleTvMediaPlayer(name, config_entry.unique_id, manager)])


-class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
+class AppleTvMediaPlayer(
+    AppleTVEntity, MediaPlayerEntity, PowerListener, AudioListener, PushListener
+):
     """Representation of an Apple TV media player."""

     _attr_supported_features = SUPPORT_APPLE_TV
@@ -116,9 +126,9 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
     def async_device_connected(self, atv: AppleTV) -> None:
         """Handle when connection is made to device."""
         # NB: Do not use _is_feature_available here as it only works when playing
-        if self.atv.features.in_state(FeatureState.Available, FeatureName.PushUpdates):
-            self.atv.push_updater.listener = self
-            self.atv.push_updater.start()
+        if atv.features.in_state(FeatureState.Available, FeatureName.PushUpdates):
+            atv.push_updater.listener = self
+            atv.push_updater.start()

         self._attr_supported_features = SUPPORT_BASE

@@ -126,7 +136,7 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
         # "Unsupported" are considered here as the state of such a feature can never
         # change after a connection has been established, i.e. an unsupported feature
         # can never change to be supported.
-        all_features = self.atv.features.all_features()
+        all_features = atv.features.all_features()
         for feature_name, support_flag in SUPPORT_FEATURE_MAPPING.items():
             feature_info = all_features.get(feature_name)
             if feature_info and feature_info.state != FeatureState.Unsupported:
@@ -136,16 +146,18 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
         # metadata update arrives (sometime very soon after this callback returns)

         # Listen to power updates
-        self.atv.power.listener = self
+        atv.power.listener = self

         # Listen to volume updates
-        self.atv.audio.listener = self
+        atv.audio.listener = self

-        if self.atv.features.in_state(FeatureState.Available, FeatureName.AppList):
+        if atv.features.in_state(FeatureState.Available, FeatureName.AppList):
             self.hass.create_task(self._update_app_list())

     async def _update_app_list(self) -> None:
         _LOGGER.debug("Updating app list")
+        if not self.atv:
+            return
         try:
             apps = await self.atv.apps.app_list()
         except exceptions.NotSupportedError:
@@ -189,33 +201,56 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
         return None

     @callback
-    def playstatus_update(self, _, playing: Playing) -> None:
-        """Print what is currently playing when it changes."""
-        self._playing = playing
+    def playstatus_update(self, updater: PushUpdater, playstatus: Playing) -> None:
+        """Print what is currently playing when it changes.
+
+        This is a callback function from pyatv.interface.PushListener.
+        """
+        self._playing = playstatus
         self.async_write_ha_state()

     @callback
-    def playstatus_error(self, _, exception: Exception) -> None:
-        """Inform about an error and restart push updates."""
+    def playstatus_error(self, updater: PushUpdater, exception: Exception) -> None:
+        """Inform about an error and restart push updates.
+
+        This is a callback function from pyatv.interface.PushListener.
+        """
         _LOGGER.warning("A %s error occurred: %s", exception.__class__, exception)
         self._playing = None
         self.async_write_ha_state()

     @callback
     def powerstate_update(self, old_state: PowerState, new_state: PowerState) -> None:
-        """Update power state when it changes."""
+        """Update power state when it changes.
+
+        This is a callback function from pyatv.interface.PowerListener.
+        """
         self.async_write_ha_state()

     @callback
     def volume_update(self, old_level: float, new_level: float) -> None:
-        """Update volume when it changes."""
+        """Update volume when it changes.
+
+        This is a callback function from pyatv.interface.AudioListener.
+        """
         self.async_write_ha_state()

+    @callback
+    def outputdevices_update(
+        self, old_devices: list[OutputDevice], new_devices: list[OutputDevice]
+    ) -> None:
+        """Output devices were updated.
+
+        This is a callback function from pyatv.interface.AudioListener.
+        """
+
     @property
     def app_id(self) -> str | None:
         """ID of the current running app."""
-        if self._is_feature_available(FeatureName.App) and (
-            app := self.atv.metadata.app
+        if (
+            self.atv
+            and self._is_feature_available(FeatureName.App)
+            and (app := self.atv.metadata.app) is not None
         ):
             return app.identifier
         return None
@@ -223,8 +258,10 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
     @property
     def app_name(self) -> str | None:
         """Name of the current running app."""
-        if self._is_feature_available(FeatureName.App) and (
-            app := self.atv.metadata.app
+        if (
+            self.atv
+            and self._is_feature_available(FeatureName.App)
+            and (app := self.atv.metadata.app) is not None
         ):
             return app.name
         return None
@@ -255,7 +292,7 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
     @property
     def volume_level(self) -> float | None:
         """Volume level of the media player (0..1)."""
-        if self._is_feature_available(FeatureName.Volume):
+        if self.atv and self._is_feature_available(FeatureName.Volume):
             return self.atv.audio.volume / 100.0  # from percent
         return None

@@ -286,6 +323,8 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
         """Send the play_media command to the media player."""
         # If input (file) has a file format supported by pyatv, then stream it with
         # RAOP. Otherwise try to play it with regular AirPlay.
+        if not self.atv:
+            return
         if media_type in {MediaType.APP, MediaType.URL}:
             await self.atv.apps.launch_app(media_id)
             return
@@ -313,7 +352,8 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
         """Hash value for media image."""
         state = self.state
         if (
-            self._playing
+            self.atv
+            and self._playing
             and self._is_feature_available(FeatureName.Artwork)
             and state not in {None, MediaPlayerState.OFF, MediaPlayerState.IDLE}
         ):
@@ -323,7 +363,11 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):
     async def async_get_media_image(self) -> tuple[bytes | None, str | None]:
         """Fetch media image of current playing image."""
         state = self.state
-        if self._playing and state not in {MediaPlayerState.OFF, MediaPlayerState.IDLE}:
+        if (
+            self.atv
+            and self._playing
+            and state not in {MediaPlayerState.OFF, MediaPlayerState.IDLE}
+        ):
             artwork = await self.atv.metadata.artwork()
             if artwork:
                 return artwork.bytes, artwork.mimetype
@@ -439,20 +483,24 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):

     async def async_turn_on(self) -> None:
         """Turn the media player on."""
-        if self._is_feature_available(FeatureName.TurnOn):
+        if self.atv and self._is_feature_available(FeatureName.TurnOn):
             await self.atv.power.turn_on()

     async def async_turn_off(self) -> None:
         """Turn the media player off."""
-        if (self._is_feature_available(FeatureName.TurnOff)) and (
-            not self._is_feature_available(FeatureName.PowerState)
-            or self.atv.power.power_state == PowerState.On
+        if (
+            self.atv
+            and (self._is_feature_available(FeatureName.TurnOff))
+            and (
+                not self._is_feature_available(FeatureName.PowerState)
+                or self.atv.power.power_state == PowerState.On
+            )
         ):
             await self.atv.power.turn_off()

     async def async_media_play_pause(self) -> None:
         """Pause media on media player."""
-        if self._playing:
+        if self.atv and self._playing:
             await self.atv.remote_control.play_pause()

     async def async_media_play(self) -> None:
@@ -519,5 +567,6 @@ class AppleTvMediaPlayer(AppleTVEntity, MediaPlayerEntity):

     async def async_select_source(self, source: str) -> None:
         """Select input source."""
-        if app_id := self._app_list.get(source):
-            await self.atv.apps.launch_app(app_id)
+        if self.atv:
+            if app_id := self._app_list.get(source):
+                await self.atv.apps.launch_app(app_id)
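The media player changes above all follow one pattern: the entity's atv attribute is now Optional, so every command, property, and callback guards on it before touching the connection. A small self-contained sketch of that shape; the class and attribute layout here are illustrative, not from the integration:

    from typing import Any


    class DisconnectableRemote:
        """Illustrative only: commands become no-ops while disconnected."""

        atv: Any | None = None  # set by the connection manager on connect

        async def play_pause(self) -> None:
            # Mirrors the `if self.atv and ...` guards added in the diff above.
            if self.atv is None:
                return
            await self.atv.remote_control.play_pause()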
@@ -15,7 +15,7 @@ from homeassistant.const import CONF_NAME
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

-from . import AppleTVEntity
+from . import AppleTVEntity, AppleTVManager
 from .const import DOMAIN

 _LOGGER = logging.getLogger(__name__)
@@ -38,8 +38,10 @@ async def async_setup_entry(
     async_add_entities: AddEntitiesCallback,
 ) -> None:
     """Load Apple TV remote based on a config entry."""
-    name = config_entry.data[CONF_NAME]
-    manager = hass.data[DOMAIN][config_entry.unique_id]
+    name: str = config_entry.data[CONF_NAME]
+    # apple_tv config entries always have a unique id
+    assert config_entry.unique_id is not None
+    manager: AppleTVManager = hass.data[DOMAIN][config_entry.unique_id]
     async_add_entities([AppleTVRemote(name, config_entry.unique_id, manager)])


@@ -47,7 +49,7 @@ class AppleTVRemote(AppleTVEntity, RemoteEntity):
     """Device that sends commands to an Apple TV."""

     @property
-    def is_on(self):
+    def is_on(self) -> bool:
         """Return true if device is on."""
         return self.atv is not None

@@ -64,13 +66,13 @@ class AppleTVRemote(AppleTVEntity, RemoteEntity):
         num_repeats = kwargs[ATTR_NUM_REPEATS]
         delay = kwargs.get(ATTR_DELAY_SECS, DEFAULT_DELAY_SECS)

-        if not self.is_on:
+        if not self.atv:
             _LOGGER.error("Unable to send commands, not connected to %s", self.name)
             return

         for _ in range(num_repeats):
             for single_command in command:
-                attr_value = None
+                attr_value: Any = None
                 if attributes := COMMAND_TO_ATTRIBUTE.get(single_command):
                     attr_value = self.atv
                     for attr_name in attributes:
@@ -81,5 +83,5 @@ class AppleTVRemote(AppleTVEntity, RemoteEntity):
                     raise ValueError("Command not found. Exiting sequence")

                 _LOGGER.info("Sending command %s", single_command)
-                await attr_value()  # type: ignore[operator]
+                await attr_value()
                 await asyncio.sleep(delay)
|
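The remote's send-command loop above resolves each command name to an awaitable by walking an attribute path on the connected pyatv object before calling it. A minimal standalone sketch of that lookup pattern, using hypothetical stand-in classes and mapping rather than the real pyatv interface:

# Sketch of the attribute-path lookup used above; the mapping and the fake
# device classes are illustrative stand-ins, not the real pyatv API.
import asyncio

COMMAND_TO_ATTRIBUTE = {"volume_up": ("audio", "volume_up")}  # assumed shape


class _FakeAudio:
    async def volume_up(self) -> None:
        print("volume up sent")


class _FakeDevice:
    audio = _FakeAudio()


async def send(device: _FakeDevice, command: str) -> None:
    attr_value = device
    for attr_name in COMMAND_TO_ATTRIBUTE[command]:
        # Each step narrows from the device to an interface to a command.
        attr_value = getattr(attr_value, attr_name)
    await attr_value()  # the resolved attribute is an awaitable command


asyncio.run(send(_FakeDevice(), "volume_up"))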
homeassistant/components/aprilaire/__init__.py (new file, 69 lines)
@@ -0,0 +1,69 @@
"""The Aprilaire integration."""

from __future__ import annotations

import logging

from pyaprilaire.const import Attribute

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.core import Event, HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.device_registry import format_mac

from .const import DOMAIN
from .coordinator import AprilaireCoordinator

PLATFORMS: list[Platform] = [Platform.CLIMATE]

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up a config entry for Aprilaire."""

    host = entry.data[CONF_HOST]
    port = entry.data[CONF_PORT]

    coordinator = AprilaireCoordinator(hass, entry.unique_id, host, port)
    await coordinator.start_listen()

    hass.data.setdefault(DOMAIN, {})[entry.unique_id] = coordinator

    async def ready_callback(ready: bool):
        if ready:
            mac_address = format_mac(coordinator.data[Attribute.MAC_ADDRESS])

            if mac_address != entry.unique_id:
                raise ConfigEntryAuthFailed("Invalid MAC address")

            await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

            async def _async_close(_: Event) -> None:
                coordinator.stop_listen()

            entry.async_on_unload(
                hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_close)
            )
        else:
            _LOGGER.error("Failed to wait for ready")

            coordinator.stop_listen()

            raise ConfigEntryNotReady()

    await coordinator.wait_for_ready(ready_callback)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

    if unload_ok:
        coordinator: AprilaireCoordinator = hass.data[DOMAIN].pop(entry.unique_id)
        coordinator.stop_listen()

    return unload_ok
homeassistant/components/aprilaire/climate.py (new file, 302 lines)
@@ -0,0 +1,302 @@
"""The Aprilaire climate component."""

from __future__ import annotations

from typing import Any

from pyaprilaire.const import Attribute

from homeassistant.components.climate import (
    FAN_AUTO,
    FAN_ON,
    PRESET_AWAY,
    PRESET_NONE,
    ClimateEntity,
    ClimateEntityFeature,
    HVACAction,
    HVACMode,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import PRECISION_HALVES, PRECISION_WHOLE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import (
    DOMAIN,
    FAN_CIRCULATE,
    PRESET_PERMANENT_HOLD,
    PRESET_TEMPORARY_HOLD,
    PRESET_VACATION,
)
from .coordinator import AprilaireCoordinator
from .entity import BaseAprilaireEntity

HVAC_MODE_MAP = {
    1: HVACMode.OFF,
    2: HVACMode.HEAT,
    3: HVACMode.COOL,
    4: HVACMode.HEAT,
    5: HVACMode.AUTO,
}

HVAC_MODES_MAP = {
    1: [HVACMode.OFF, HVACMode.HEAT],
    2: [HVACMode.OFF, HVACMode.COOL],
    3: [HVACMode.OFF, HVACMode.HEAT, HVACMode.COOL],
    4: [HVACMode.OFF, HVACMode.HEAT, HVACMode.COOL],
    5: [HVACMode.OFF, HVACMode.HEAT, HVACMode.COOL, HVACMode.AUTO],
    6: [HVACMode.OFF, HVACMode.HEAT, HVACMode.COOL, HVACMode.AUTO],
}

PRESET_MODE_MAP = {
    1: PRESET_TEMPORARY_HOLD,
    2: PRESET_PERMANENT_HOLD,
    3: PRESET_AWAY,
    4: PRESET_VACATION,
}

FAN_MODE_MAP = {
    1: FAN_ON,
    2: FAN_AUTO,
    3: FAN_CIRCULATE,
}


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Add climates for passed config_entry in HA."""

    coordinator: AprilaireCoordinator = hass.data[DOMAIN][config_entry.unique_id]

    async_add_entities([AprilaireClimate(coordinator, config_entry.unique_id)])


class AprilaireClimate(BaseAprilaireEntity, ClimateEntity):
    """Climate entity for Aprilaire."""

    _attr_fan_modes = [FAN_AUTO, FAN_ON, FAN_CIRCULATE]
    _attr_min_humidity = 10
    _attr_max_humidity = 50
    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _attr_translation_key = "thermostat"

    @property
    def precision(self) -> float:
        """Get the precision based on the unit."""
        return (
            PRECISION_HALVES
            if self.hass.config.units.temperature_unit == UnitOfTemperature.CELSIUS
            else PRECISION_WHOLE
        )

    @property
    def supported_features(self) -> ClimateEntityFeature:
        """Get supported features."""
        features = 0

        if self.coordinator.data.get(Attribute.MODE) == 5:
            features = features | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
        else:
            features = features | ClimateEntityFeature.TARGET_TEMPERATURE

        if self.coordinator.data.get(Attribute.HUMIDIFICATION_AVAILABLE) == 2:
            features = features | ClimateEntityFeature.TARGET_HUMIDITY

        features = features | ClimateEntityFeature.PRESET_MODE

        features = features | ClimateEntityFeature.FAN_MODE

        return features

    @property
    def current_humidity(self) -> int | None:
        """Get current humidity."""
        return self.coordinator.data.get(
            Attribute.INDOOR_HUMIDITY_CONTROLLING_SENSOR_VALUE
        )

    @property
    def target_humidity(self) -> int | None:
        """Get current target humidity."""
        return self.coordinator.data.get(Attribute.HUMIDIFICATION_SETPOINT)

    @property
    def hvac_mode(self) -> HVACMode | None:
        """Get HVAC mode."""

        if mode := self.coordinator.data.get(Attribute.MODE):
            if hvac_mode := HVAC_MODE_MAP.get(mode):
                return hvac_mode

        return None

    @property
    def hvac_modes(self) -> list[HVACMode]:
        """Get supported HVAC modes."""

        if modes := self.coordinator.data.get(Attribute.THERMOSTAT_MODES):
            if thermostat_modes := HVAC_MODES_MAP.get(modes):
                return thermostat_modes

        return []

    @property
    def hvac_action(self) -> HVACAction | None:
        """Get the current HVAC action."""

        if self.coordinator.data.get(Attribute.HEATING_EQUIPMENT_STATUS, 0):
            return HVACAction.HEATING

        if self.coordinator.data.get(Attribute.COOLING_EQUIPMENT_STATUS, 0):
            return HVACAction.COOLING

        return HVACAction.IDLE

    @property
    def current_temperature(self) -> float | None:
        """Get current temperature."""
        return self.coordinator.data.get(
            Attribute.INDOOR_TEMPERATURE_CONTROLLING_SENSOR_VALUE
        )

    @property
    def target_temperature(self) -> float | None:
        """Get the target temperature."""

        hvac_mode = self.hvac_mode

        if hvac_mode == HVACMode.COOL:
            return self.target_temperature_high
        if hvac_mode == HVACMode.HEAT:
            return self.target_temperature_low

        return None

    @property
    def target_temperature_step(self) -> float | None:
        """Get the step for the target temperature based on the unit."""
        return (
            0.5
            if self.hass.config.units.temperature_unit == UnitOfTemperature.CELSIUS
            else 1
        )

    @property
    def target_temperature_high(self) -> float | None:
        """Get cool setpoint."""
        return self.coordinator.data.get(Attribute.COOL_SETPOINT)

    @property
    def target_temperature_low(self) -> float | None:
        """Get heat setpoint."""
        return self.coordinator.data.get(Attribute.HEAT_SETPOINT)

    @property
    def preset_mode(self) -> str | None:
        """Get the current preset mode."""
        if hold := self.coordinator.data.get(Attribute.HOLD):
            if preset_mode := PRESET_MODE_MAP.get(hold):
                return preset_mode

        return PRESET_NONE

    @property
    def preset_modes(self) -> list[str] | None:
        """Get the supported preset modes."""
        presets = [PRESET_NONE, PRESET_VACATION]

        if self.coordinator.data.get(Attribute.AWAY_AVAILABLE) == 1:
            presets.append(PRESET_AWAY)

        hold = self.coordinator.data.get(Attribute.HOLD, 0)

        if hold == 1:
            presets.append(PRESET_TEMPORARY_HOLD)
        elif hold == 2:
            presets.append(PRESET_PERMANENT_HOLD)

        return presets

    @property
    def fan_mode(self) -> str | None:
        """Get fan mode."""

        if mode := self.coordinator.data.get(Attribute.FAN_MODE):
            if fan_mode := FAN_MODE_MAP.get(mode):
                return fan_mode

        return None

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new target temperature."""

        cool_setpoint = 0
        heat_setpoint = 0

        if temperature := kwargs.get("temperature"):
            if self.coordinator.data.get(Attribute.MODE) == 3:
                cool_setpoint = temperature
            else:
                heat_setpoint = temperature
        else:
            if target_temp_low := kwargs.get("target_temp_low"):
                heat_setpoint = target_temp_low
            if target_temp_high := kwargs.get("target_temp_high"):
                cool_setpoint = target_temp_high

        if cool_setpoint == 0 and heat_setpoint == 0:
            return

        await self.coordinator.client.update_setpoint(cool_setpoint, heat_setpoint)

        await self.coordinator.client.read_control()

    async def async_set_humidity(self, humidity: int) -> None:
        """Set the target humidification setpoint."""

        await self.coordinator.client.set_humidification_setpoint(humidity)

    async def async_set_fan_mode(self, fan_mode: str) -> None:
        """Set the fan mode."""

        try:
            fan_mode_value_index = list(FAN_MODE_MAP.values()).index(fan_mode)
        except ValueError as exc:
            raise ValueError(f"Unsupported fan mode {fan_mode}") from exc

        fan_mode_value = list(FAN_MODE_MAP.keys())[fan_mode_value_index]

        await self.coordinator.client.update_fan_mode(fan_mode_value)

        await self.coordinator.client.read_control()

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set the HVAC mode."""

        try:
            mode_value_index = list(HVAC_MODE_MAP.values()).index(hvac_mode)
        except ValueError as exc:
            raise ValueError(f"Unsupported HVAC mode {hvac_mode}") from exc

        mode_value = list(HVAC_MODE_MAP.keys())[mode_value_index]

        await self.coordinator.client.update_mode(mode_value)

        await self.coordinator.client.read_control()

    async def async_set_preset_mode(self, preset_mode: str) -> None:
        """Set the preset mode."""

        if preset_mode == PRESET_AWAY:
            await self.coordinator.client.set_hold(3)
        elif preset_mode == PRESET_VACATION:
            await self.coordinator.client.set_hold(4)
        elif preset_mode == PRESET_NONE:
            await self.coordinator.client.set_hold(0)
        else:
            raise ValueError(f"Unsupported preset mode {preset_mode}")

        await self.coordinator.client.read_scheduling()
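The setter methods above (async_set_fan_mode and async_set_hvac_mode) translate a Home Assistant mode string back to the thermostat's numeric protocol value by indexing into the map's values and then its keys. A small self-contained sketch of that reverse lookup, with the FAN_* constants inlined as stand-ins for the Home Assistant imports:

# Reverse lookup from a mode string to its numeric protocol value, mirroring
# the pattern in async_set_fan_mode above (constants inlined for illustration).
FAN_ON, FAN_AUTO, FAN_CIRCULATE = "on", "auto", "Circulate"

FAN_MODE_MAP = {1: FAN_ON, 2: FAN_AUTO, 3: FAN_CIRCULATE}


def fan_mode_to_value(fan_mode: str) -> int:
    """Return the numeric value for a fan mode string, or raise ValueError."""
    try:
        index = list(FAN_MODE_MAP.values()).index(fan_mode)
    except ValueError as exc:
        raise ValueError(f"Unsupported fan mode {fan_mode}") from exc
    # dict preserves insertion order, so values() and keys() line up by index.
    return list(FAN_MODE_MAP.keys())[index]


assert fan_mode_to_value("Circulate") == 3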
homeassistant/components/aprilaire/config_flow.py (new file, 72 lines)
@@ -0,0 +1,72 @@
"""Config flow for the Aprilaire integration."""

from __future__ import annotations

import logging
from typing import Any

from pyaprilaire.const import Attribute
import voluptuous as vol

from homeassistant import config_entries
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.data_entry_flow import FlowResult
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import format_mac

from .const import DOMAIN
from .coordinator import AprilaireCoordinator

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST): str,
        vol.Required(CONF_PORT, default=7000): cv.port,
    }
)

_LOGGER = logging.getLogger(__name__)


class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Aprilaire."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Handle the initial step."""

        if user_input is None:
            return self.async_show_form(
                step_id="user", data_schema=STEP_USER_DATA_SCHEMA
            )

        coordinator = AprilaireCoordinator(
            self.hass, None, user_input[CONF_HOST], user_input[CONF_PORT]
        )
        await coordinator.start_listen()

        async def ready_callback(ready: bool):
            if not ready:
                _LOGGER.error("Failed to wait for ready")

        try:
            ready = await coordinator.wait_for_ready(ready_callback)
        finally:
            coordinator.stop_listen()

        mac_address = coordinator.data.get(Attribute.MAC_ADDRESS)

        if ready and mac_address is not None:
            await self.async_set_unique_id(format_mac(mac_address))

            self._abort_if_unique_id_configured()

            return self.async_create_entry(title="Aprilaire", data=user_input)

        return self.async_show_form(
            step_id="user",
            data_schema=STEP_USER_DATA_SCHEMA,
            errors={"base": "connection_failed"},
        )
homeassistant/components/aprilaire/const.py (new file, 11 lines)
@@ -0,0 +1,11 @@
"""Constants for the Aprilaire integration."""

from __future__ import annotations

DOMAIN = "aprilaire"

FAN_CIRCULATE = "Circulate"

PRESET_TEMPORARY_HOLD = "Temporary"
PRESET_PERMANENT_HOLD = "Permanent"
PRESET_VACATION = "Vacation"
homeassistant/components/aprilaire/coordinator.py (new file, 209 lines)
@@ -0,0 +1,209 @@
"""The Aprilaire coordinator."""

from __future__ import annotations

from collections.abc import Awaitable, Callable
import logging
from typing import Any, Optional

import pyaprilaire.client
from pyaprilaire.const import MODELS, Attribute, FunctionalDomain

from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import BaseDataUpdateCoordinatorProtocol

from .const import DOMAIN

RECONNECT_INTERVAL = 60 * 60
RETRY_CONNECTION_INTERVAL = 10
WAIT_TIMEOUT = 30

_LOGGER = logging.getLogger(__name__)


class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
    """Coordinator for interacting with the thermostat."""

    def __init__(
        self,
        hass: HomeAssistant,
        unique_id: str | None,
        host: str,
        port: int,
    ) -> None:
        """Initialize the coordinator."""

        self.hass = hass
        self.unique_id = unique_id
        self.data: dict[str, Any] = {}

        self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {}

        self.client = pyaprilaire.client.AprilaireClient(
            host,
            port,
            self.async_set_updated_data,
            _LOGGER,
            RECONNECT_INTERVAL,
            RETRY_CONNECTION_INTERVAL,
        )

        if hasattr(self.client, "data") and self.client.data:
            self.data = self.client.data

    @callback
    def async_add_listener(
        self, update_callback: CALLBACK_TYPE, context: Any = None
    ) -> Callable[[], None]:
        """Listen for data updates."""

        @callback
        def remove_listener() -> None:
            """Remove update listener."""
            self._listeners.pop(remove_listener)

        self._listeners[remove_listener] = (update_callback, context)

        return remove_listener

    @callback
    def async_update_listeners(self) -> None:
        """Update all registered listeners."""
        for update_callback, _ in list(self._listeners.values()):
            update_callback()

    def async_set_updated_data(self, data: Any) -> None:
        """Manually update data, notify listeners and reset refresh interval."""

        old_device_info = self.create_device_info(self.data)

        self.data = self.data | data

        self.async_update_listeners()

        new_device_info = self.create_device_info(data)

        if (
            old_device_info is not None
            and new_device_info is not None
            and old_device_info != new_device_info
        ):
            device_registry = dr.async_get(self.hass)

            device = device_registry.async_get_device(old_device_info["identifiers"])

            if device is not None:
                new_device_info.pop("identifiers", None)
                new_device_info.pop("connections", None)

                device_registry.async_update_device(
                    device_id=device.id,
                    **new_device_info,  # type: ignore[misc]
                )

    async def start_listen(self):
        """Start listening for data."""
        await self.client.start_listen()

    def stop_listen(self):
        """Stop listening for data."""
        self.client.stop_listen()

    async def wait_for_ready(
        self, ready_callback: Callable[[bool], Awaitable[bool]]
    ) -> bool:
        """Wait for the client to be ready."""

        if not self.data or Attribute.MAC_ADDRESS not in self.data:
            data = await self.client.wait_for_response(
                FunctionalDomain.IDENTIFICATION, 2, WAIT_TIMEOUT
            )

            if not data or Attribute.MAC_ADDRESS not in data:
                _LOGGER.error("Missing MAC address")
                await ready_callback(False)

                return False

        if not self.data or Attribute.NAME not in self.data:
            await self.client.wait_for_response(
                FunctionalDomain.IDENTIFICATION, 4, WAIT_TIMEOUT
            )

        if not self.data or Attribute.THERMOSTAT_MODES not in self.data:
            await self.client.wait_for_response(
                FunctionalDomain.CONTROL, 7, WAIT_TIMEOUT
            )

        if (
            not self.data
            or Attribute.INDOOR_TEMPERATURE_CONTROLLING_SENSOR_STATUS not in self.data
        ):
            await self.client.wait_for_response(
                FunctionalDomain.SENSORS, 2, WAIT_TIMEOUT
            )

        await ready_callback(True)

        return True

    @property
    def device_name(self) -> str:
        """Get the name of the thermostat."""

        return self.create_device_name(self.data)

    def create_device_name(self, data: Optional[dict[str, Any]]) -> str:
        """Create the name of the thermostat."""

        name = data.get(Attribute.NAME) if data else None

        return name if name else "Aprilaire"

    def get_hw_version(self, data: dict[str, Any]) -> str:
        """Get the hardware version."""

        if hardware_revision := data.get(Attribute.HARDWARE_REVISION):
            return (
                f"Rev. {chr(hardware_revision)}"
                if hardware_revision > ord("A")
                else str(hardware_revision)
            )

        return "Unknown"

    @property
    def device_info(self) -> DeviceInfo | None:
        """Get the device info for the thermostat."""
        return self.create_device_info(self.data)

    def create_device_info(self, data: dict[str, Any]) -> DeviceInfo | None:
        """Create the device info for the thermostat."""

        if data is None or Attribute.MAC_ADDRESS not in data or self.unique_id is None:
            return None

        device_info = DeviceInfo(
            identifiers={(DOMAIN, self.unique_id)},
            name=self.create_device_name(data),
            manufacturer="Aprilaire",
        )

        model_number = data.get(Attribute.MODEL_NUMBER)
        if model_number is not None:
            device_info["model"] = MODELS.get(model_number, f"Unknown ({model_number})")

        device_info["hw_version"] = self.get_hw_version(data)

        firmware_major_revision = data.get(Attribute.FIRMWARE_MAJOR_REVISION)
        firmware_minor_revision = data.get(Attribute.FIRMWARE_MINOR_REVISION)
        if firmware_major_revision is not None:
            device_info["sw_version"] = (
                str(firmware_major_revision)
                if firmware_minor_revision is None
                else f"{firmware_major_revision}.{firmware_minor_revision:02}"
            )

        return device_info
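The coordinator above keeps its own listener registry: async_add_listener stores the update callback keyed by its removal closure and returns that closure, and async_set_updated_data merges incoming data into the existing dict before notifying every listener. A hedged, framework-free sketch of the same registry pattern (no Home Assistant imports; names are illustrative):

# Minimal stand-alone version of the listener registry used by
# AprilaireCoordinator above.
from collections.abc import Callable
from typing import Any


class ListenerRegistry:
    def __init__(self) -> None:
        self.data: dict[str, Any] = {}
        self._listeners: dict[Callable[[], None], Callable[[], None]] = {}

    def add_listener(self, update_callback: Callable[[], None]) -> Callable[[], None]:
        def remove_listener() -> None:
            self._listeners.pop(remove_listener)

        # The removal closure doubles as the dictionary key, as in the coordinator.
        self._listeners[remove_listener] = update_callback
        return remove_listener

    def set_updated_data(self, data: dict[str, Any]) -> None:
        self.data = self.data | data  # merge new values, never replace the dict
        for update_callback in list(self._listeners.values()):
            update_callback()


registry = ListenerRegistry()
unsub = registry.add_listener(lambda: print("updated:", registry.data))
registry.set_updated_data({"mode": 5})
unsub()  # stop receiving updates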
homeassistant/components/aprilaire/entity.py (new file, 46 lines)
@@ -0,0 +1,46 @@
"""Base functionality for Aprilaire entities."""

from __future__ import annotations

import logging

from pyaprilaire.const import Attribute

from homeassistant.helpers.update_coordinator import BaseCoordinatorEntity

from .coordinator import AprilaireCoordinator

_LOGGER = logging.getLogger(__name__)


class BaseAprilaireEntity(BaseCoordinatorEntity[AprilaireCoordinator]):
    """Base for Aprilaire entities."""

    _attr_available = False
    _attr_has_entity_name = True

    def __init__(
        self, coordinator: AprilaireCoordinator, unique_id: str | None
    ) -> None:
        """Initialize the entity."""

        super().__init__(coordinator)

        self._attr_device_info = coordinator.device_info
        self._attr_unique_id = f"{unique_id}_{self.translation_key}"

        self._update_available()

    def _update_available(self):
        """Update the entity availability."""

        connected: bool = self.coordinator.data.get(
            Attribute.CONNECTED, None
        ) or self.coordinator.data.get(Attribute.RECONNECTING, None)

        stopped: bool = self.coordinator.data.get(Attribute.STOPPED, None)

        self._attr_available = connected and not stopped

    async def async_update(self) -> None:
        """Implement abstract base method."""
homeassistant/components/aprilaire/manifest.json (new file, 11 lines)
@@ -0,0 +1,11 @@
{
  "domain": "aprilaire",
  "name": "Aprilaire",
  "codeowners": ["@chamberlain2007"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/aprilaire",
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["pyaprilaire"],
  "requirements": ["pyaprilaire==0.7.0"]
}
homeassistant/components/aprilaire/strings.json (new file, 28 lines)
@@ -0,0 +1,28 @@
{
  "config": {
    "step": {
      "user": {
        "data": {
          "host": "[%key:common::config_flow::data::host%]",
          "port": "[%key:common::config_flow::data::port%]"
        },
        "data_description": {
          "port": "Usually 7000 or 8000"
        }
      }
    },
    "error": {
      "connection_failed": "Connection failed. Please check that the host and port is correct."
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    }
  },
  "entity": {
    "climate": {
      "thermostat": {
        "name": "Thermostat"
      }
    }
  }
}
@@ -83,7 +83,7 @@ async def _run_client(hass: HomeAssistant, client: Client, interval: float) -> N
         except ConnectionFailed:
             await asyncio.sleep(interval)
-        except asyncio.TimeoutError:
+        except TimeoutError:
             continue
         except Exception:  # pylint: disable=broad-except
             _LOGGER.exception("Unexpected exception, aborting arcam client")
@@ -325,9 +325,7 @@ class ArcamFmj(MediaPlayerEntity):
     def media_content_type(self) -> MediaType | None:
         """Content type of current playing media."""
         source = self._state.get_source()
-        if source == SourceCodes.DAB:
-            value = MediaType.MUSIC
-        elif source == SourceCodes.FM:
+        if source in (SourceCodes.DAB, SourceCodes.FM):
             value = MediaType.MUSIC
         else:
             value = None
@@ -83,6 +83,7 @@ async def async_pipeline_from_audio_stream(
     event_callback: PipelineEventCallback,
     stt_metadata: stt.SpeechMetadata,
     stt_stream: AsyncIterable[bytes],
+    wake_word_phrase: str | None = None,
     pipeline_id: str | None = None,
     conversation_id: str | None = None,
     tts_audio_output: str | None = None,
@@ -101,6 +102,7 @@ async def async_pipeline_from_audio_stream(
         device_id=device_id,
         stt_metadata=stt_metadata,
         stt_stream=stt_stream,
+        wake_word_phrase=wake_word_phrase,
         run=PipelineRun(
             hass,
             context=context,
@@ -10,6 +10,6 @@ DEFAULT_WAKE_WORD_TIMEOUT = 3  # seconds
 CONF_DEBUG_RECORDING_DIR = "debug_recording_dir"

 DATA_LAST_WAKE_UP = f"{DOMAIN}.last_wake_up"
-DEFAULT_WAKE_WORD_COOLDOWN = 2  # seconds
+WAKE_WORD_COOLDOWN = 2  # seconds

 EVENT_RECORDING = f"{DOMAIN}_recording"
@@ -38,6 +38,17 @@ class SpeechToTextError(PipelineError):
     """Error in speech-to-text portion of pipeline."""


+class DuplicateWakeUpDetectedError(WakeWordDetectionError):
+    """Error when multiple voice assistants wake up at the same time (same wake word)."""
+
+    def __init__(self, wake_up_phrase: str) -> None:
+        """Set error message."""
+        super().__init__(
+            "duplicate_wake_up_detected",
+            f"Duplicate wake-up detected for {wake_up_phrase}",
+        )
+
+
 class IntentRecognitionError(PipelineError):
     """Error in intent recognition portion of pipeline."""

@@ -55,10 +55,11 @@ from .const import (
     CONF_DEBUG_RECORDING_DIR,
     DATA_CONFIG,
     DATA_LAST_WAKE_UP,
-    DEFAULT_WAKE_WORD_COOLDOWN,
     DOMAIN,
+    WAKE_WORD_COOLDOWN,
 )
 from .error import (
+    DuplicateWakeUpDetectedError,
     IntentRecognitionError,
     PipelineError,
     PipelineNotFound,
@@ -453,9 +454,6 @@ class WakeWordSettings:
     audio_seconds_to_buffer: float = 0
     """Seconds of audio to buffer before detection and forward to STT."""

-    cooldown_seconds: float = DEFAULT_WAKE_WORD_COOLDOWN
-    """Seconds after a wake word detection where other detections are ignored."""
-

 @dataclass(frozen=True)
 class AudioSettings:
@@ -742,16 +740,22 @@ class PipelineRun:
             wake_word_output: dict[str, Any] = {}
         else:
             # Avoid duplicate detections by checking cooldown
-            wake_up_key = f"{self.wake_word_entity_id}.{result.wake_word_id}"
-            last_wake_up = self.hass.data[DATA_LAST_WAKE_UP].get(wake_up_key)
+            last_wake_up = self.hass.data[DATA_LAST_WAKE_UP].get(
+                result.wake_word_phrase
+            )
             if last_wake_up is not None:
                 sec_since_last_wake_up = time.monotonic() - last_wake_up
-                if sec_since_last_wake_up < wake_word_settings.cooldown_seconds:
-                    _LOGGER.debug("Duplicate wake word detection occurred")
-                    raise WakeWordDetectionAborted
+                if sec_since_last_wake_up < WAKE_WORD_COOLDOWN:
+                    _LOGGER.debug(
+                        "Duplicate wake word detection occurred for %s",
+                        result.wake_word_phrase,
+                    )
+                    raise DuplicateWakeUpDetectedError(result.wake_word_phrase)

             # Record last wake up time to block duplicate detections
-            self.hass.data[DATA_LAST_WAKE_UP][wake_up_key] = time.monotonic()
+            self.hass.data[DATA_LAST_WAKE_UP][
+                result.wake_word_phrase
+            ] = time.monotonic()

             if result.queued_audio:
                 # Add audio that was pending at detection.
@@ -1308,6 +1312,9 @@ class PipelineInput:
     stt_stream: AsyncIterable[bytes] | None = None
     """Input audio for stt. Required when start_stage = stt."""

+    wake_word_phrase: str | None = None
+    """Optional key used to de-duplicate wake-ups for local wake word detection."""
+
     intent_input: str | None = None
     """Input for conversation agent. Required when start_stage = intent."""

@@ -1352,6 +1359,25 @@ class PipelineInput:
             assert self.stt_metadata is not None
             assert stt_processed_stream is not None

+            if self.wake_word_phrase is not None:
+                # Avoid duplicate wake-ups by checking cooldown
+                last_wake_up = self.run.hass.data[DATA_LAST_WAKE_UP].get(
+                    self.wake_word_phrase
+                )
+                if last_wake_up is not None:
+                    sec_since_last_wake_up = time.monotonic() - last_wake_up
+                    if sec_since_last_wake_up < WAKE_WORD_COOLDOWN:
+                        _LOGGER.debug(
+                            "Speech-to-text cancelled to avoid duplicate wake-up for %s",
+                            self.wake_word_phrase,
+                        )
+                        raise DuplicateWakeUpDetectedError(self.wake_word_phrase)
+
+                # Record last wake up time to block duplicate detections
+                self.run.hass.data[DATA_LAST_WAKE_UP][
+                    self.wake_word_phrase
+                ] = time.monotonic()
+
             stt_input_stream = stt_processed_stream

             if stt_audio_buffer:
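Both PipelineRun and PipelineInput above apply the same cooldown rule: remember the monotonic time of the last wake-up per phrase and reject a new one that arrives within WAKE_WORD_COOLDOWN seconds. A minimal sketch of that de-duplication check outside the pipeline, with a plain dict standing in for hass.data[DATA_LAST_WAKE_UP]:

# Stand-alone sketch of the per-phrase cooldown check used above; the store is
# a module-level dict here rather than Home Assistant's shared data registry.
import time

WAKE_WORD_COOLDOWN = 2  # seconds

last_wake_up: dict[str, float] = {}


def register_wake_up(phrase: str) -> bool:
    """Return True if the wake-up is accepted, False if it is a duplicate."""
    now = time.monotonic()
    last = last_wake_up.get(phrase)
    if last is not None and (now - last) < WAKE_WORD_COOLDOWN:
        return False  # duplicate within the cooldown window
    last_wake_up[phrase] = now
    return True


assert register_wake_up("okay nabu") is True
assert register_wake_up("okay nabu") is False  # second detection is suppressed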
@@ -97,7 +97,12 @@ def async_register_websocket_api(hass: HomeAssistant) -> None:
             extra=vol.ALLOW_EXTRA,
         ),
         PipelineStage.STT: vol.Schema(
-            {vol.Required("input"): {vol.Required("sample_rate"): int}},
+            {
+                vol.Required("input"): {
+                    vol.Required("sample_rate"): int,
+                    vol.Optional("wake_word_phrase"): str,
+                }
+            },
             extra=vol.ALLOW_EXTRA,
         ),
         PipelineStage.INTENT: vol.Schema(
@@ -149,12 +154,15 @@ async def websocket_run(
     msg_input = msg["input"]
     audio_queue: asyncio.Queue[bytes] = asyncio.Queue()
     incoming_sample_rate = msg_input["sample_rate"]
+    wake_word_phrase: str | None = None

     if start_stage == PipelineStage.WAKE_WORD:
         wake_word_settings = WakeWordSettings(
             timeout=msg["input"].get("timeout", DEFAULT_WAKE_WORD_TIMEOUT),
             audio_seconds_to_buffer=msg_input.get("audio_seconds_to_buffer", 0),
         )
+    elif start_stage == PipelineStage.STT:
+        wake_word_phrase = msg["input"].get("wake_word_phrase")

     async def stt_stream() -> AsyncGenerator[bytes, None]:
         state = None
@@ -189,6 +197,7 @@ async def websocket_run(
             channel=stt.AudioChannels.CHANNEL_MONO,
         )
         input_args["stt_stream"] = stt_stream()
+        input_args["wake_word_phrase"] = wake_word_phrase

     # Audio settings
     audio_settings = AudioSettings(
@@ -241,7 +250,7 @@ async def websocket_run(
         # Task contains a timeout
         async with asyncio.timeout(timeout):
             await run_task
-    except asyncio.TimeoutError:
+    except TimeoutError:
         pipeline_input.run.process_event(
             PipelineEvent(
                 PipelineEventType.ERROR,
@@ -487,7 +496,7 @@ async def websocket_device_capture(
     )

     try:
-        with contextlib.suppress(asyncio.TimeoutError):
+        with contextlib.suppress(TimeoutError):
             async with asyncio.timeout(timeout_seconds):
                 while True:
                     # Send audio chunks encoded as base64
@@ -15,6 +15,7 @@ from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import discovery
 import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.dispatcher import async_dispatcher_send, dispatcher_connect
+from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
 from homeassistant.helpers.typing import ConfigType

 _LOGGER = logging.getLogger(__name__)
@@ -50,6 +51,21 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
     password: str = conf[CONF_PASSWORD]

     hass.data[DOMAIN] = AsteriskData(hass, host, port, password, config)
+    create_issue(
+        hass,
+        DOMAIN,
+        "deprecated_integration",
+        breaks_in_ha_version="2024.9.0",
+        is_fixable=False,
+        issue_domain=DOMAIN,
+        severity=IssueSeverity.WARNING,
+        translation_key="deprecated_integration",
+        translation_placeholders={
+            "domain": DOMAIN,
+            "integration_title": "Asterisk Voicemail",
+            "mailbox": "mailbox",
+        },
+    )

     return True

homeassistant/components/asterisk_mbox/strings.json (new file, 8 lines)
@@ -0,0 +1,8 @@
{
  "issues": {
    "deprecated_integration": {
      "title": "The {integration_title} is being removed",
      "description": "{integration_title} is being removed as the `{mailbox}` platform is being removed and {integration_title} supports no other platforms. Remove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue."
    }
  }
}
@@ -211,10 +211,7 @@ class AsusWrtLegacyBridge(AsusWrtBridge):

     async def async_get_connected_devices(self) -> dict[str, WrtDevice]:
         """Get list of connected devices."""
-        try:
-            api_devices = await self._api.async_get_connected_devices()
-        except OSError as exc:
-            raise UpdateFailed(exc) from exc
+        api_devices = await self._api.async_get_connected_devices()
         return {
             format_mac(mac): WrtDevice(dev.ip, dev.name, None)
             for mac, dev in api_devices.items()
@@ -343,10 +340,7 @@ class AsusWrtHttpBridge(AsusWrtBridge):

     async def async_get_connected_devices(self) -> dict[str, WrtDevice]:
         """Get list of connected devices."""
-        try:
-            api_devices = await self._api.async_get_connected_devices()
-        except AsusWrtError as exc:
-            raise UpdateFailed(exc) from exc
+        api_devices = await self._api.async_get_connected_devices()
         return {
             format_mac(mac): WrtDevice(dev.ip, dev.name, dev.node)
             for mac, dev in api_devices.items()
@@ -216,7 +216,7 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
         if error is not None:
             return error, None

-        _LOGGER.info(
+        _LOGGER.debug(
             "Successfully connected to the AsusWrt router at %s using protocol %s",
             host,
             protocol,
@@ -20,7 +20,7 @@ from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.device_registry import DeviceInfo, format_mac
 from homeassistant.helpers.dispatcher import async_dispatcher_send
 from homeassistant.helpers.event import async_track_time_interval
-from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
 from homeassistant.util import dt as dt_util, slugify

 from .bridge import AsusWrtBridge, WrtDevice
@@ -276,7 +276,7 @@ class AsusWrtRouter:
         _LOGGER.debug("Checking devices for ASUS router %s", self.host)
         try:
             wrt_devices = await self._api.async_get_connected_devices()
-        except UpdateFailed as exc:
+        except (OSError, AsusWrtError) as exc:
             if not self._connect_error:
                 self._connect_error = True
                 _LOGGER.error(
@@ -59,7 +59,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         return await async_setup_august(hass, entry, august_gateway)
     except (RequireValidation, InvalidAuth) as err:
         raise ConfigEntryAuthFailed from err
-    except asyncio.TimeoutError as err:
+    except TimeoutError as err:
         raise ConfigEntryNotReady("Timed out connecting to august api") from err
     except (AugustApiAIOHTTPError, ClientResponseError, CannotConnect) as err:
         raise ConfigEntryNotReady from err
@@ -233,7 +233,7 @@ class AugustData(AugustSubscriberMixin):
             return_exceptions=True,
         ):
             if isinstance(result, Exception) and not isinstance(
-                result, (asyncio.TimeoutError, ClientResponseError, CannotConnect)
+                result, (TimeoutError, ClientResponseError, CannotConnect)
             ):
                 _LOGGER.warning(
                     "Unexpected exception during initial sync: %s",
@@ -293,7 +293,7 @@ class AugustData(AugustSubscriberMixin):
         for device_id in device_ids_list:
             try:
                 await self._async_refresh_device_detail_by_id(device_id)
-            except asyncio.TimeoutError:
+            except TimeoutError:
                 _LOGGER.warning(
                     "Timed out calling august api during refresh of device: %s",
                     device_id,
|
@ -1,7 +1,6 @@
|
|||||||
"""Consume the august activity stream."""
|
"""Consume the august activity stream."""
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import asyncio
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from functools import partial
|
from functools import partial
|
||||||
import logging
|
import logging
|
||||||
@ -63,11 +62,10 @@ class ActivityStream(AugustSubscriberMixin):
|
|||||||
self._update_debounce: dict[str, Debouncer] = {}
|
self._update_debounce: dict[str, Debouncer] = {}
|
||||||
self._update_debounce_jobs: dict[str, HassJob] = {}
|
self._update_debounce_jobs: dict[str, HassJob] = {}
|
||||||
|
|
||||||
async def _async_update_house_id_later(
|
@callback
|
||||||
self, debouncer: Debouncer, _: datetime
|
def _async_update_house_id_later(self, debouncer: Debouncer, _: datetime) -> None:
|
||||||
) -> None:
|
|
||||||
"""Call a debouncer from async_call_later."""
|
"""Call a debouncer from async_call_later."""
|
||||||
await debouncer.async_call()
|
debouncer.async_schedule_call()
|
||||||
|
|
||||||
async def async_setup(self) -> None:
|
async def async_setup(self) -> None:
|
||||||
"""Token refresh check and catch up the activity stream."""
|
"""Token refresh check and catch up the activity stream."""
|
||||||
@ -128,9 +126,9 @@ class ActivityStream(AugustSubscriberMixin):
|
|||||||
_LOGGER.debug("Skipping update because pubnub is connected")
|
_LOGGER.debug("Skipping update because pubnub is connected")
|
||||||
return
|
return
|
||||||
_LOGGER.debug("Start retrieving device activities")
|
_LOGGER.debug("Start retrieving device activities")
|
||||||
await asyncio.gather(
|
# Await in sequence to avoid hammering the API
|
||||||
*(debouncer.async_call() for debouncer in self._update_debounce.values())
|
for debouncer in self._update_debounce.values():
|
||||||
)
|
await debouncer.async_call()
|
||||||
|
|
||||||
@callback
|
@callback
|
||||||
def async_schedule_house_id_refresh(self, house_id: str) -> None:
|
def async_schedule_house_id_refresh(self, house_id: str) -> None:
|
||||||
@ -139,7 +137,7 @@ class ActivityStream(AugustSubscriberMixin):
|
|||||||
_async_cancel_future_scheduled_updates(future_updates)
|
_async_cancel_future_scheduled_updates(future_updates)
|
||||||
|
|
||||||
debouncer = self._update_debounce[house_id]
|
debouncer = self._update_debounce[house_id]
|
||||||
self._hass.async_create_task(debouncer.async_call())
|
debouncer.async_schedule_call()
|
||||||
# Schedule two updates past the debounce time
|
# Schedule two updates past the debounce time
|
||||||
# to ensure we catch the case where the activity
|
# to ensure we catch the case where the activity
|
||||||
# api does not update right away and we need to poll
|
# api does not update right away and we need to poll
|
||||||
|
@ -26,7 +26,8 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"documentation": "https://www.home-assistant.io/integrations/august",
|
"documentation": "https://www.home-assistant.io/integrations/august",
|
||||||
|
"import_executor": true,
|
||||||
"iot_class": "cloud_push",
|
"iot_class": "cloud_push",
|
||||||
"loggers": ["pubnub", "yalexs"],
|
"loggers": ["pubnub", "yalexs"],
|
||||||
"requirements": ["yalexs==1.11.2", "yalexs-ble==2.4.1"]
|
"requirements": ["yalexs==1.11.4", "yalexs-ble==2.4.2"]
|
||||||
}
|
}
|
||||||
|
@@ -43,12 +43,17 @@ class AugustSubscriberMixin:
     async def _async_refresh(self, time: datetime) -> None:
         """Refresh data."""

+    @callback
+    def _async_scheduled_refresh(self, now: datetime) -> None:
+        """Call the refresh method."""
+        self._hass.async_create_task(self._async_refresh(now), eager_start=True)
+
     @callback
     def _async_setup_listeners(self) -> None:
         """Create interval and stop listeners."""
         self._unsub_interval = async_track_time_interval(
             self._hass,
-            self._async_refresh,
+            self._async_scheduled_refresh,
             self._update_interval,
             name="august refresh",
         )
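The subscriber change above swaps the coroutine handed to async_track_time_interval for a synchronous callback that schedules the refresh as a task. A rough asyncio-only sketch of that wrapper pattern (the names and the direct loop access here are illustrative, not the Home Assistant API):

# Illustrative asyncio version of wrapping an async refresh in a sync callback
# that schedules it as a task, similar in spirit to _async_scheduled_refresh above.
import asyncio
from datetime import datetime


async def _async_refresh(time: datetime) -> None:
    print("refreshing at", time)


def _scheduled_refresh(now: datetime) -> None:
    # A plain function can be handed to timer/interval helpers; it defers the
    # real work to a task on the running event loop.
    asyncio.get_running_loop().create_task(_async_refresh(now))


async def main() -> None:
    _scheduled_refresh(datetime.now())
    await asyncio.sleep(0)  # let the scheduled task run


asyncio.run(main())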
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user