2023.12.0 (#105135)
commit ea1222bff3
.coveragerc (38 changes)
@@ -67,9 +67,6 @@ omit =
 homeassistant/components/android_ip_webcam/switch.py
 homeassistant/components/anel_pwrctrl/switch.py
 homeassistant/components/anthemav/media_player.py
-homeassistant/components/apcupsd/__init__.py
-homeassistant/components/apcupsd/binary_sensor.py
-homeassistant/components/apcupsd/sensor.py
 homeassistant/components/apple_tv/__init__.py
 homeassistant/components/apple_tv/browse_media.py
 homeassistant/components/apple_tv/media_player.py
@@ -123,6 +120,7 @@ omit =
 homeassistant/components/blink/binary_sensor.py
 homeassistant/components/blink/camera.py
 homeassistant/components/blink/sensor.py
+homeassistant/components/blink/switch.py
 homeassistant/components/blinksticklight/light.py
 homeassistant/components/blockchain/sensor.py
 homeassistant/components/bloomsky/*
@@ -144,6 +142,7 @@ omit =
 homeassistant/components/braviatv/coordinator.py
 homeassistant/components/braviatv/media_player.py
 homeassistant/components/braviatv/remote.py
+homeassistant/components/broadlink/climate.py
 homeassistant/components/broadlink/light.py
 homeassistant/components/broadlink/remote.py
 homeassistant/components/broadlink/switch.py
@@ -216,9 +215,6 @@ omit =
 homeassistant/components/discogs/sensor.py
 homeassistant/components/discord/__init__.py
 homeassistant/components/discord/notify.py
-homeassistant/components/discovergy/__init__.py
-homeassistant/components/discovergy/sensor.py
-homeassistant/components/discovergy/coordinator.py
 homeassistant/components/dlib_face_detect/image_processing.py
 homeassistant/components/dlib_face_identify/image_processing.py
 homeassistant/components/dlink/data.py
@@ -338,7 +334,6 @@ omit =
 homeassistant/components/epson/__init__.py
 homeassistant/components/epson/media_player.py
 homeassistant/components/epsonworkforce/sensor.py
-homeassistant/components/eq3btsmart/climate.py
 homeassistant/components/escea/__init__.py
 homeassistant/components/escea/climate.py
 homeassistant/components/escea/discovery.py
@@ -369,7 +364,8 @@ omit =
 homeassistant/components/faa_delays/binary_sensor.py
 homeassistant/components/faa_delays/coordinator.py
 homeassistant/components/familyhub/camera.py
-homeassistant/components/fastdotcom/*
+homeassistant/components/fastdotcom/sensor.py
+homeassistant/components/fastdotcom/__init__.py
 homeassistant/components/ffmpeg/camera.py
 homeassistant/components/fibaro/__init__.py
 homeassistant/components/fibaro/binary_sensor.py
@@ -426,9 +422,7 @@ omit =
 homeassistant/components/foursquare/*
 homeassistant/components/free_mobile/notify.py
 homeassistant/components/freebox/camera.py
-homeassistant/components/freebox/device_tracker.py
 homeassistant/components/freebox/home_base.py
-homeassistant/components/freebox/router.py
 homeassistant/components/freebox/switch.py
 homeassistant/components/fritz/common.py
 homeassistant/components/fritz/device_tracker.py
@@ -769,9 +763,6 @@ omit =
 homeassistant/components/mutesync/binary_sensor.py
 homeassistant/components/mvglive/sensor.py
 homeassistant/components/mycroft/*
-homeassistant/components/myq/__init__.py
-homeassistant/components/myq/cover.py
-homeassistant/components/myq/light.py
 homeassistant/components/mysensors/__init__.py
 homeassistant/components/mysensors/climate.py
 homeassistant/components/mysensors/cover.py
@@ -822,7 +813,6 @@ omit =
 homeassistant/components/nfandroidtv/__init__.py
 homeassistant/components/nfandroidtv/notify.py
 homeassistant/components/nibe_heatpump/__init__.py
-homeassistant/components/nibe_heatpump/climate.py
 homeassistant/components/nibe_heatpump/binary_sensor.py
 homeassistant/components/nibe_heatpump/select.py
 homeassistant/components/nibe_heatpump/sensor.py
@@ -837,6 +827,7 @@ omit =
 homeassistant/components/noaa_tides/sensor.py
 homeassistant/components/nobo_hub/__init__.py
 homeassistant/components/nobo_hub/climate.py
+homeassistant/components/nobo_hub/select.py
 homeassistant/components/nobo_hub/sensor.py
 homeassistant/components/norway_air/air_quality.py
 homeassistant/components/notify_events/notify.py
@@ -937,6 +928,9 @@ omit =
 homeassistant/components/panasonic_viera/media_player.py
 homeassistant/components/pandora/media_player.py
 homeassistant/components/pencom/switch.py
+homeassistant/components/permobil/__init__.py
+homeassistant/components/permobil/coordinator.py
+homeassistant/components/permobil/sensor.py
 homeassistant/components/philips_js/__init__.py
 homeassistant/components/philips_js/light.py
 homeassistant/components/philips_js/media_player.py
@@ -950,8 +944,6 @@ omit =
 homeassistant/components/pilight/light.py
 homeassistant/components/pilight/switch.py
 homeassistant/components/ping/__init__.py
-homeassistant/components/ping/binary_sensor.py
-homeassistant/components/ping/device_tracker.py
 homeassistant/components/ping/helpers.py
 homeassistant/components/pioneer/media_player.py
 homeassistant/components/plaato/__init__.py
@@ -1069,6 +1061,7 @@ omit =
 homeassistant/components/roomba/sensor.py
 homeassistant/components/roomba/vacuum.py
 homeassistant/components/roon/__init__.py
+homeassistant/components/roon/event.py
 homeassistant/components/roon/media_browser.py
 homeassistant/components/roon/media_player.py
 homeassistant/components/roon/server.py
@@ -1132,10 +1125,7 @@ omit =
 homeassistant/components/sky_hub/*
 homeassistant/components/skybeacon/sensor.py
 homeassistant/components/skybell/__init__.py
-homeassistant/components/skybell/binary_sensor.py
 homeassistant/components/skybell/camera.py
-homeassistant/components/skybell/coordinator.py
-homeassistant/components/skybell/entity.py
 homeassistant/components/skybell/light.py
 homeassistant/components/skybell/sensor.py
 homeassistant/components/skybell/switch.py
@@ -1291,9 +1281,11 @@ omit =
 homeassistant/components/system_bridge/__init__.py
 homeassistant/components/system_bridge/binary_sensor.py
 homeassistant/components/system_bridge/coordinator.py
+homeassistant/components/system_bridge/entity.py
 homeassistant/components/system_bridge/media_player.py
 homeassistant/components/system_bridge/notify.py
 homeassistant/components/system_bridge/sensor.py
+homeassistant/components/system_bridge/update.py
 homeassistant/components/systemmonitor/sensor.py
 homeassistant/components/tado/__init__.py
 homeassistant/components/tado/binary_sensor.py
@@ -1431,6 +1423,13 @@ omit =
 homeassistant/components/upnp/device.py
 homeassistant/components/upnp/sensor.py
 homeassistant/components/vasttrafik/sensor.py
+homeassistant/components/v2c/__init__.py
+homeassistant/components/v2c/binary_sensor.py
+homeassistant/components/v2c/coordinator.py
+homeassistant/components/v2c/entity.py
+homeassistant/components/v2c/number.py
+homeassistant/components/v2c/sensor.py
+homeassistant/components/v2c/switch.py
 homeassistant/components/velbus/__init__.py
 homeassistant/components/velbus/binary_sensor.py
 homeassistant/components/velbus/button.py
@@ -1467,6 +1466,7 @@ omit =
 homeassistant/components/vicare/button.py
 homeassistant/components/vicare/climate.py
 homeassistant/components/vicare/entity.py
+homeassistant/components/vicare/number.py
 homeassistant/components/vicare/sensor.py
 homeassistant/components/vicare/utils.py
 homeassistant/components/vicare/water_heater.py
.devcontainer/devcontainer.json
@@ -10,6 +10,8 @@
   "customizations": {
     "vscode": {
       "extensions": [
+        "charliermarsh.ruff",
+        "ms-python.pylint",
         "ms-python.vscode-pylance",
         "visualstudioexptteam.vscodeintellicode",
         "redhat.vscode-yaml",
@@ -19,14 +21,6 @@
       // Please keep this file in sync with settings in home-assistant/.vscode/settings.default.json
       "settings": {
         "python.pythonPath": "/usr/local/bin/python",
-        "python.linting.enabled": true,
-        "python.linting.pylintEnabled": true,
-        "python.formatting.blackPath": "/usr/local/bin/black",
-        "python.linting.pycodestylePath": "/usr/local/bin/pycodestyle",
-        "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle",
-        "python.linting.mypyPath": "/usr/local/bin/mypy",
-        "python.linting.pylintPath": "/usr/local/bin/pylint",
-        "python.formatting.provider": "black",
         "python.testing.pytestArgs": ["--no-cov"],
         "editor.formatOnPaste": false,
         "editor.formatOnSave": true,
@@ -45,7 +39,10 @@
           "!include_dir_list scalar",
           "!include_dir_merge_list scalar",
           "!include_dir_merge_named scalar"
-        ]
+        ],
+        "[python]": {
+          "editor.defaultFormatter": "charliermarsh.ruff"
+        }
       }
     }
   }
.github/PULL_REQUEST_TEMPLATE.md (2 changes)
@@ -60,7 +60,7 @@
 - [ ] There is no commented out code in this PR.
 - [ ] I have followed the [development checklist][dev-checklist]
 - [ ] I have followed the [perfect PR recommendations][perfect-pr]
-- [ ] The code has been formatted using Black (`black --fast homeassistant tests`)
+- [ ] The code has been formatted using Ruff (`ruff format homeassistant tests`)
 - [ ] Tests have been added to verify that the new code works.

 If user exposed functionality or configuration variables are added/changed:
.github/workflows/builder.yml (2 changes)
@@ -330,7 +330,7 @@ jobs:
         uses: actions/checkout@v4.1.1

       - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.1.2
+        uses: sigstore/cosign-installer@v3.2.0
         with:
           cosign-release: "v2.0.2"

.github/workflows/ci.yaml (68 changes)
@@ -35,9 +35,8 @@ on:
 env:
   CACHE_VERSION: 5
   PIP_CACHE_VERSION: 4
-  MYPY_CACHE_VERSION: 5
-  BLACK_CACHE_VERSION: 1
-  HA_SHORT_VERSION: "2023.11"
+  MYPY_CACHE_VERSION: 6
+  HA_SHORT_VERSION: "2023.12"
   DEFAULT_PYTHON: "3.11"
   ALL_PYTHON_VERSIONS: "['3.11', '3.12']"
   # 10.3 is the oldest supported version
@@ -58,7 +57,6 @@ env:
   POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']"
   PRE_COMMIT_CACHE: ~/.cache/pre-commit
   PIP_CACHE: /tmp/pip-cache
-  BLACK_CACHE: /tmp/black-cache
   SQLALCHEMY_WARN_20: 1
   PYTHONASYNCIODEBUG: 1
   HASS_CI: 1
@@ -261,8 +259,8 @@ jobs:
           . venv/bin/activate
           pre-commit install-hooks

-  lint-black:
-    name: Check black
+  lint-ruff-format:
+    name: Check ruff-format
     runs-on: ubuntu-22.04
     needs:
       - info
@@ -276,13 +274,6 @@ jobs:
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
-      - name: Generate partial black restore key
-        id: generate-black-key
-        run: |
-          black_version=$(cat requirements_test_pre_commit.txt | grep black | cut -d '=' -f 3)
-          echo "version=$black_version" >> $GITHUB_OUTPUT
-          echo "key=black-${{ env.BLACK_CACHE_VERSION }}-$black_version-${{
-            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
         uses: actions/cache/restore@v3.3.2
@@ -301,33 +292,12 @@ jobs:
           key: >-
             ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.pre-commit_cache_key }}
-      - name: Restore black cache
-        uses: actions/cache@v3.3.2
-        with:
-          path: ${{ env.BLACK_CACHE }}
-          key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
-            steps.generate-black-key.outputs.key }}
-          restore-keys: |
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-black-${{
-            env.BLACK_CACHE_VERSION }}-${{ steps.generate-black-key.outputs.version }}-${{
-            env.HA_SHORT_VERSION }}-
-      - name: Run black (fully)
-        if: needs.info.outputs.test_full_suite == 'true'
-        env:
-          BLACK_CACHE_DIR: ${{ env.BLACK_CACHE }}
+      - name: Run ruff-format
        run: |
          . venv/bin/activate
-          pre-commit run --hook-stage manual black --all-files --show-diff-on-failure
-      - name: Run black (partially)
-        if: needs.info.outputs.test_full_suite == 'false'
-        shell: bash
+          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
        env:
-          BLACK_CACHE_DIR: ${{ env.BLACK_CACHE }}
-        run: |
-          . venv/bin/activate
-          shopt -s globstar
-          pre-commit run --hook-stage manual black --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure
+          RUFF_OUTPUT_FORMAT: github

   lint-ruff:
     name: Check ruff
@@ -362,22 +332,12 @@ jobs:
           key: >-
             ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.pre-commit_cache_key }}
-      - name: Register ruff problem matcher
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/ruff.json"
-      - name: Run ruff (fully)
-        if: needs.info.outputs.test_full_suite == 'true'
+      - name: Run ruff
         run: |
           . venv/bin/activate
           pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
-      - name: Run ruff (partially)
-        if: needs.info.outputs.test_full_suite == 'false'
-        shell: bash
-        run: |
-          . venv/bin/activate
-          shopt -s globstar
-          pre-commit run --hook-stage manual ruff --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github

   lint-other:
     name: Check other linters
     runs-on: ubuntu-22.04
@@ -787,7 +747,7 @@ jobs:
             cov_params+=(--cov-report=xml)
           fi

-          python3 -X dev -m pytest \
+          python3 -b -X dev -m pytest \
             -qq \
             --timeout=9 \
             --durations=10 \
@@ -824,7 +784,7 @@ jobs:
             cov_params+=(--cov-report=term-missing)
           fi

-          python3 -X dev -m pytest \
+          python3 -b -X dev -m pytest \
             -qq \
             --timeout=9 \
             -n auto \
@@ -945,7 +905,7 @@ jobs:
             cov_params+=(--cov-report=term-missing)
           fi

-          python3 -X dev -m pytest \
+          python3 -b -X dev -m pytest \
             -qq \
             --timeout=20 \
             -n 1 \
@@ -1069,7 +1029,7 @@ jobs:
             cov_params+=(--cov-report=term-missing)
           fi

-          python3 -X dev -m pytest \
+          python3 -b -X dev -m pytest \
             -qq \
             --timeout=9 \
             -n 1 \
.github/workflows/codeql.yml (4 changes)
@@ -29,11 +29,11 @@ jobs:
         uses: actions/checkout@v4.1.1

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v2.22.4
+        uses: github/codeql-action/init@v2.22.8
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v2.22.4
+        uses: github/codeql-action/analyze@v2.22.8
         with:
           category: "/language:python"
.github/workflows/lock.yml
vendored
2
.github/workflows/lock.yml
vendored
@ -10,7 +10,7 @@ jobs:
|
|||||||
if: github.repository_owner == 'home-assistant'
|
if: github.repository_owner == 'home-assistant'
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: dessant/lock-threads@v4.0.1
|
- uses: dessant/lock-threads@v5.0.1
|
||||||
with:
|
with:
|
||||||
github-token: ${{ github.token }}
|
github-token: ${{ github.token }}
|
||||||
issue-inactive-days: "30"
|
issue-inactive-days: "30"
|
||||||
|
.github/workflows/matchers/ruff.json (30 changes, file removed)
@@ -1,30 +0,0 @@
-{
-  "problemMatcher": [
-    {
-      "owner": "ruff-error",
-      "severity": "error",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s([EF]\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    },
-    {
-      "owner": "ruff-warning",
-      "severity": "warning",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDNW]\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    }
-  ]
-}
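The problem matcher can be dropped because the reworked ci.yaml above sets RUFF_OUTPUT_FORMAT: github, so Ruff emits GitHub Actions annotations itself. A minimal sketch of the idea, outside the workflow (the paths and the subprocess wrapper are illustrative, not part of this PR):

import os
import subprocess

# With RUFF_OUTPUT_FORMAT=github, Ruff prints "::error file=...,line=...::..." workflow
# commands that GitHub turns into inline annotations, so no regex problem matcher is needed.
env = {**os.environ, "RUFF_OUTPUT_FORMAT": "github"}
subprocess.run(["ruff", "check", "homeassistant", "tests"], env=env, check=False)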
.pre-commit-config.yaml
@@ -1,16 +1,11 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.1.1
+    rev: v0.1.6
     hooks:
       - id: ruff
         args:
           - --fix
-  - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 23.10.0
-    hooks:
-      - id: black
-        args:
-          - --quiet
+      - id: ruff-format
         files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.py$
   - repo: https://github.com/codespell-project/codespell
     rev: v2.2.2
@@ -39,7 +34,7 @@ repos:
     hooks:
       - id: yamllint
   - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: v2.7.1
+    rev: v3.0.3
     hooks:
       - id: prettier
   - repo: https://github.com/cdce8p/python-typing-update
.prettierignore
@@ -5,3 +5,4 @@ homeassistant/components/*/translations/*.json
 homeassistant/generated/*
 tests/components/lidarr/fixtures/initialize.js
 tests/components/lidarr/fixtures/initialize-wrong.js
+tests/fixtures/core/config/yaml_errors/
.strict-typing
@@ -180,6 +180,7 @@ homeassistant.components.image_upload.*
 homeassistant.components.imap.*
 homeassistant.components.input_button.*
 homeassistant.components.input_select.*
+homeassistant.components.input_text.*
 homeassistant.components.integration.*
 homeassistant.components.ipp.*
 homeassistant.components.iqvia.*
@@ -201,6 +202,7 @@ homeassistant.components.ld2410_ble.*
 homeassistant.components.lidarr.*
 homeassistant.components.lifx.*
 homeassistant.components.light.*
+homeassistant.components.linear_garage_door.*
 homeassistant.components.litejet.*
 homeassistant.components.litterrobot.*
 homeassistant.components.local_ip.*
.vscode/extensions.json (6 changes)
@@ -1,3 +1,7 @@
 {
-  "recommendations": ["esbenp.prettier-vscode", "ms-python.python"]
+  "recommendations": [
+    "charliermarsh.ruff",
+    "esbenp.prettier-vscode",
+    "ms-python.python"
+  ]
 }
.vscode/launch.json (8 changes)
@@ -22,6 +22,14 @@
       "args": ["--debug", "-c", "config", "--skip-pip"],
       "preLaunchTask": "Compile English translations"
     },
+    {
+      "name": "Home Assistant: Changed tests",
+      "type": "python",
+      "request": "launch",
+      "module": "pytest",
+      "justMyCode": false,
+      "args": ["--timeout=10", "--picked"],
+    },
     {
       // Debug by attaching to local Home Assistant server using Remote Python Debugger.
       // See https://www.home-assistant.io/integrations/debugpy/
.vscode/settings.default.json (1 change)
@@ -1,6 +1,5 @@
 {
   // Please keep this file in sync with settings in home-assistant/.devcontainer/devcontainer.json
-  "python.formatting.provider": "black",
   // Added --no-cov to work around TypeError: message must be set
   // https://github.com/microsoft/vscode-python/issues/14067
   "python.testing.pytestArgs": ["--no-cov"],
.yamllint
@@ -1,5 +1,6 @@
 ignore: |
   azure-*.yml
+  tests/fixtures/core/config/yaml_errors/
 rules:
   braces:
     level: error
CODEOWNERS (65 changes)
@@ -151,8 +151,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/bizkaibus/ @UgaitzEtxebarria
 /homeassistant/components/blebox/ @bbx-a @riokuu
 /tests/components/blebox/ @bbx-a @riokuu
-/homeassistant/components/blink/ @fronzbot
-/tests/components/blink/ @fronzbot
+/homeassistant/components/blink/ @fronzbot @mkmer
+/tests/components/blink/ @fronzbot @mkmer
 /homeassistant/components/bluemaestro/ @bdraco
 /tests/components/bluemaestro/ @bdraco
 /homeassistant/components/blueprint/ @home-assistant/core
@@ -170,8 +170,8 @@ build.json @home-assistant/supervisor
 /tests/components/bosch_shc/ @tschamm
 /homeassistant/components/braviatv/ @bieniu @Drafteed
 /tests/components/braviatv/ @bieniu @Drafteed
-/homeassistant/components/broadlink/ @danielhiversen @felipediel @L-I-Am
-/tests/components/broadlink/ @danielhiversen @felipediel @L-I-Am
+/homeassistant/components/broadlink/ @danielhiversen @felipediel @L-I-Am @eifinger
+/tests/components/broadlink/ @danielhiversen @felipediel @L-I-Am @eifinger
 /homeassistant/components/brother/ @bieniu
 /tests/components/brother/ @bieniu
 /homeassistant/components/brottsplatskartan/ @gjohansson-ST
@@ -259,6 +259,8 @@ build.json @home-assistant/supervisor
 /tests/components/denonavr/ @ol-iver @starkillerOG
 /homeassistant/components/derivative/ @afaucogney
 /tests/components/derivative/ @afaucogney
+/homeassistant/components/devialet/ @fwestenberg
+/tests/components/devialet/ @fwestenberg
 /homeassistant/components/device_automation/ @home-assistant/core
 /tests/components/device_automation/ @home-assistant/core
 /homeassistant/components/device_tracker/ @home-assistant/core
@@ -307,12 +309,12 @@ build.json @home-assistant/supervisor
 /tests/components/eafm/ @Jc2k
 /homeassistant/components/easyenergy/ @klaasnicolaas
 /tests/components/easyenergy/ @klaasnicolaas
-/homeassistant/components/ecobee/ @marthoc @marcolivierarsenault
-/tests/components/ecobee/ @marthoc @marcolivierarsenault
+/homeassistant/components/ecobee/ @marcolivierarsenault
+/tests/components/ecobee/ @marcolivierarsenault
 /homeassistant/components/ecoforest/ @pjanuario
 /tests/components/ecoforest/ @pjanuario
-/homeassistant/components/econet/ @vangorra @w1ll1am23
-/tests/components/econet/ @vangorra @w1ll1am23
+/homeassistant/components/econet/ @w1ll1am23
+/tests/components/econet/ @w1ll1am23
 /homeassistant/components/ecovacs/ @OverloadUT @mib1185
 /homeassistant/components/ecowitt/ @pvizeli
 /tests/components/ecowitt/ @pvizeli
@@ -345,17 +347,15 @@ build.json @home-assistant/supervisor
 /homeassistant/components/enigma2/ @fbradyirl
 /homeassistant/components/enocean/ @bdurrer
 /tests/components/enocean/ @bdurrer
-/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek
-/tests/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek
+/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek @catsmanac
+/tests/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek @catsmanac
 /homeassistant/components/entur_public_transport/ @hfurubotten
 /homeassistant/components/environment_canada/ @gwww @michaeldavie
 /tests/components/environment_canada/ @gwww @michaeldavie
-/homeassistant/components/envisalink/ @ufodone
 /homeassistant/components/ephember/ @ttroy50
 /homeassistant/components/epson/ @pszafer
 /tests/components/epson/ @pszafer
 /homeassistant/components/epsonworkforce/ @ThaStealth
-/homeassistant/components/eq3btsmart/ @rytilahti
 /homeassistant/components/escea/ @lazdavila
 /tests/components/escea/ @lazdavila
 /homeassistant/components/esphome/ @OttoWinter @jesserockz @kbx81 @bdraco
@@ -373,7 +373,8 @@ build.json @home-assistant/supervisor
 /tests/components/faa_delays/ @ntilley905
 /homeassistant/components/fan/ @home-assistant/core
 /tests/components/fan/ @home-assistant/core
-/homeassistant/components/fastdotcom/ @rohankapoorcom
+/homeassistant/components/fastdotcom/ @rohankapoorcom @erwindouna
+/tests/components/fastdotcom/ @rohankapoorcom @erwindouna
 /homeassistant/components/fibaro/ @rappenze
 /tests/components/fibaro/ @rappenze
 /homeassistant/components/file/ @fabaff
@@ -490,8 +491,6 @@ build.json @home-assistant/supervisor
 /tests/components/greeneye_monitor/ @jkeljo
 /homeassistant/components/group/ @home-assistant/core
 /tests/components/group/ @home-assistant/core
-/homeassistant/components/growatt_server/ @muppet3000
-/tests/components/growatt_server/ @muppet3000
 /homeassistant/components/guardian/ @bachya
 /tests/components/guardian/ @bachya
 /homeassistant/components/habitica/ @ASMfreaK @leikoilja
@@ -699,6 +698,8 @@ build.json @home-assistant/supervisor
 /tests/components/life360/ @pnbruckner
 /homeassistant/components/light/ @home-assistant/core
 /tests/components/light/ @home-assistant/core
+/homeassistant/components/linear_garage_door/ @IceBotYT
+/tests/components/linear_garage_door/ @IceBotYT
 /homeassistant/components/linux_battery/ @fabaff
 /homeassistant/components/litejet/ @joncar
 /tests/components/litejet/ @joncar
@@ -811,8 +812,6 @@ build.json @home-assistant/supervisor
 /tests/components/mutesync/ @currentoor
 /homeassistant/components/my/ @home-assistant/core
 /tests/components/my/ @home-assistant/core
-/homeassistant/components/myq/ @ehendrix23 @Lash-L
-/tests/components/myq/ @ehendrix23 @Lash-L
 /homeassistant/components/mysensors/ @MartinHjelmare @functionpointer
 /tests/components/mysensors/ @MartinHjelmare @functionpointer
 /homeassistant/components/mystrom/ @fabaff
@@ -929,6 +928,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/oru/ @bvlaicu
 /homeassistant/components/otbr/ @home-assistant/core
 /tests/components/otbr/ @home-assistant/core
+/homeassistant/components/ourgroceries/ @OnFreund
+/tests/components/ourgroceries/ @OnFreund
 /homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev
 /tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev
 /homeassistant/components/ovo_energy/ @timmo001
@@ -943,6 +944,8 @@ build.json @home-assistant/supervisor
 /tests/components/peco/ @IceBotYT
 /homeassistant/components/pegel_online/ @mib1185
 /tests/components/pegel_online/ @mib1185
+/homeassistant/components/permobil/ @IsakNyberg
+/tests/components/permobil/ @IsakNyberg
 /homeassistant/components/persistent_notification/ @home-assistant/core
 /tests/components/persistent_notification/ @home-assistant/core
 /homeassistant/components/philips_js/ @elupus
@@ -979,6 +982,8 @@ build.json @home-assistant/supervisor
 /tests/components/prometheus/ @knyar
 /homeassistant/components/prosegur/ @dgomes
 /tests/components/prosegur/ @dgomes
+/homeassistant/components/proximity/ @mib1185
+/tests/components/proximity/ @mib1185
 /homeassistant/components/proxmoxve/ @jhollowe @Corbeno
 /homeassistant/components/prusalink/ @balloob
 /tests/components/prusalink/ @balloob
@@ -1052,7 +1057,7 @@ build.json @home-assistant/supervisor
 /tests/components/reolink/ @starkillerOG
 /homeassistant/components/repairs/ @home-assistant/core
 /tests/components/repairs/ @home-assistant/core
-/homeassistant/components/repetier/ @MTrab @ShadowBr0ther
+/homeassistant/components/repetier/ @ShadowBr0ther
 /homeassistant/components/rflink/ @javicalle
 /tests/components/rflink/ @javicalle
 /homeassistant/components/rfxtrx/ @danielhiversen @elupus @RobBie1221
@@ -1061,6 +1066,8 @@ build.json @home-assistant/supervisor
 /tests/components/rhasspy/ @balloob @synesthesiam
 /homeassistant/components/ridwell/ @bachya
 /tests/components/ridwell/ @bachya
+/homeassistant/components/ring/ @sdb9696
+/tests/components/ring/ @sdb9696
 /homeassistant/components/risco/ @OnFreund
 /tests/components/risco/ @OnFreund
 /homeassistant/components/rituals_perfume_genie/ @milanmeu @frenck
@@ -1231,8 +1238,8 @@ build.json @home-assistant/supervisor
 /tests/components/stookwijzer/ @fwestenberg
 /homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
 /tests/components/stream/ @hunterjm @uvjustin @allenporter
-/homeassistant/components/stt/ @home-assistant/core @pvizeli
-/tests/components/stt/ @home-assistant/core @pvizeli
+/homeassistant/components/stt/ @home-assistant/core
+/tests/components/stt/ @home-assistant/core
 /homeassistant/components/subaru/ @G-Two
 /tests/components/subaru/ @G-Two
 /homeassistant/components/suez_water/ @ooii
@@ -1317,8 +1324,8 @@ build.json @home-assistant/supervisor
 /tests/components/tomorrowio/ @raman325 @lymanepp
 /homeassistant/components/totalconnect/ @austinmroczek
 /tests/components/totalconnect/ @austinmroczek
-/homeassistant/components/tplink/ @rytilahti @thegardenmonkey
-/tests/components/tplink/ @rytilahti @thegardenmonkey
+/homeassistant/components/tplink/ @rytilahti @thegardenmonkey @bdraco
+/tests/components/tplink/ @rytilahti @thegardenmonkey @bdraco
 /homeassistant/components/tplink_omada/ @MarkGodwin
 /tests/components/tplink_omada/ @MarkGodwin
 /homeassistant/components/traccar/ @ludeeus
@@ -1339,8 +1346,8 @@ build.json @home-assistant/supervisor
 /tests/components/transmission/ @engrbm87 @JPHutchins
 /homeassistant/components/trend/ @jpbede
 /tests/components/trend/ @jpbede
-/homeassistant/components/tts/ @home-assistant/core @pvizeli
-/tests/components/tts/ @home-assistant/core @pvizeli
+/homeassistant/components/tts/ @home-assistant/core
+/tests/components/tts/ @home-assistant/core
 /homeassistant/components/tuya/ @Tuya @zlinoliver @frenck
 /tests/components/tuya/ @Tuya @zlinoliver @frenck
 /homeassistant/components/twentemilieu/ @frenck
@@ -1375,6 +1382,8 @@ build.json @home-assistant/supervisor
 /tests/components/usgs_earthquakes_feed/ @exxamalte
 /homeassistant/components/utility_meter/ @dgomes
 /tests/components/utility_meter/ @dgomes
+/homeassistant/components/v2c/ @dgomes
+/tests/components/v2c/ @dgomes
 /homeassistant/components/vacuum/ @home-assistant/core
 /tests/components/vacuum/ @home-assistant/core
 /homeassistant/components/vallox/ @andre-richter @slovdahl @viiru-
@@ -1384,13 +1393,13 @@ build.json @home-assistant/supervisor
 /homeassistant/components/velux/ @Julius2342
 /homeassistant/components/venstar/ @garbled1 @jhollowe
 /tests/components/venstar/ @garbled1 @jhollowe
-/homeassistant/components/verisure/ @frenck
-/tests/components/verisure/ @frenck
 /homeassistant/components/versasense/ @imstevenxyz
 /homeassistant/components/version/ @ludeeus
 /tests/components/version/ @ludeeus
 /homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey
 /tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey
+/homeassistant/components/vicare/ @CFenner
+/tests/components/vicare/ @CFenner
 /homeassistant/components/vilfo/ @ManneW
 /tests/components/vilfo/ @ManneW
 /homeassistant/components/vivotek/ @HarlemSquirrel
@@ -1501,8 +1510,8 @@ build.json @home-assistant/supervisor
 /tests/components/zerproc/ @emlove
 /homeassistant/components/zeversolar/ @kvanzuijlen
 /tests/components/zeversolar/ @kvanzuijlen
-/homeassistant/components/zha/ @dmulcahey @adminiuga @puddly
-/tests/components/zha/ @dmulcahey @adminiuga @puddly
+/homeassistant/components/zha/ @dmulcahey @adminiuga @puddly @TheJulianJES
+/tests/components/zha/ @dmulcahey @adminiuga @puddly @TheJulianJES
 /homeassistant/components/zodiac/ @JulienTant
 /tests/components/zodiac/ @JulienTant
 /homeassistant/components/zone/ @home-assistant/core
Dockerfile
@@ -1,3 +1,6 @@
+# Automatically generated by hassfest.
+#
+# To update, run python3 -m script.hassfest -p docker
 ARG BUILD_FROM
 FROM ${BUILD_FROM}

Dockerfile.dev
@@ -5,8 +5,7 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
 # Uninstall pre-installed formatting and linting tools
 # They would conflict with our pinned versions
 RUN \
-    pipx uninstall black \
-    && pipx uninstall pydocstyle \
+    pipx uninstall pydocstyle \
     && pipx uninstall pycodestyle \
     && pipx uninstall mypy \
     && pipx uninstall pylint
homeassistant/auth/__init__.py
@@ -280,7 +280,8 @@ class AuthManager:
             credentials=credentials,
             name=info.name,
             is_active=info.is_active,
-            group_ids=[GROUP_ID_ADMIN],
+            group_ids=[GROUP_ID_ADMIN if info.group is None else info.group],
+            local_only=info.local_only,
         )

         self.hass.bus.async_fire(EVENT_USER_ADDED, {"user_id": user.id})
homeassistant/auth/models.py
@@ -134,3 +134,5 @@ class UserMeta(NamedTuple):

     name: str | None
     is_active: bool
+    group: str | None = None
+    local_only: bool | None = None
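Taken together with the AuthManager change above, an auth provider can now hand back richer metadata for the user it creates. A minimal, hypothetical sketch of how the new fields are meant to be used (the display name and values are illustrative):

from homeassistant.auth.models import UserMeta

# A provider returning this asks for a non-admin, LAN-only user; group=None keeps
# today's behaviour (GROUP_ID_ADMIN), and local_only=None leaves the flag unset.
meta = UserMeta(
    name="Wall panel",     # illustrative display name
    is_active=True,
    group="system-users",  # built-in non-admin group id
    local_only=True,
)

# AuthManager then applies it roughly as in the hunk above:
#   group_ids=[GROUP_ID_ADMIN if info.group is None else info.group]
#   local_only=info.local_only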
homeassistant/auth/permissions/types.py
@@ -5,9 +5,7 @@ from collections.abc import Mapping

 ValueType = (
     # Example: entities.all = { read: true, control: true }
-    Mapping[str, bool]
-    | bool
-    | None
+    Mapping[str, bool] | bool | None
 )

 # Example: entities.domains = { light: … }
homeassistant/auth/providers/command_line.py
@@ -44,7 +44,11 @@ class CommandLineAuthProvider(AuthProvider):
     DEFAULT_TITLE = "Command Line Authentication"

     # which keys to accept from a program's stdout
-    ALLOWED_META_KEYS = ("name",)
+    ALLOWED_META_KEYS = (
+        "name",
+        "group",
+        "local_only",
+    )

     def __init__(self, *args: Any, **kwargs: Any) -> None:
         """Extend parent's __init__.
@@ -118,10 +122,15 @@ class CommandLineAuthProvider(AuthProvider):
     ) -> UserMeta:
         """Return extra user metadata for credentials.

-        Currently, only name is supported.
+        Currently, supports name, group and local_only.
         """
         meta = self._user_meta.get(credentials.data["username"], {})
-        return UserMeta(name=meta.get("name"), is_active=True)
+        return UserMeta(
+            name=meta.get("name"),
+            is_active=True,
+            group=meta.get("group"),
+            local_only=meta.get("local_only") == "true",
+        )


 class CommandLineLoginFlow(LoginFlow):
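For context, the command_line provider collects these keys from the configured command's stdout when meta: true is set, one "key = value" per line, and it receives the credentials through the username and password environment variables. A minimal, hypothetical auth script exercising the two new keys might look like the sketch below (the user list and group id are illustrative; only keys in ALLOWED_META_KEYS are kept, and local_only is compared against the literal string "true" as in the hunk above):

#!/usr/bin/env python3
"""Hypothetical auth command for the command_line provider (configured with meta: true)."""
import os
import sys

USERS = {"panel": "super-secret"}  # illustrative credentials store

username = os.environ.get("username", "")
password = os.environ.get("password", "")

if USERS.get(username) != password:
    sys.exit(1)  # a non-zero exit code rejects the login

# Lines of the form "key = value" on stdout become user metadata.
print(f"name = {username.title()}")
print("group = system-users")  # built-in non-admin group; omitting it keeps GROUP_ID_ADMIN
print("local_only = true")     # parsed as the literal string "true"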
homeassistant/auth/providers/legacy_api_password.py
@@ -10,10 +10,11 @@ from typing import Any, cast

 import voluptuous as vol

-from homeassistant.core import callback
+from homeassistant.core import async_get_hass, callback
 from homeassistant.data_entry_flow import FlowResult
 from homeassistant.exceptions import HomeAssistantError
 import homeassistant.helpers.config_validation as cv
+from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue

 from ..models import Credentials, UserMeta
 from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
@@ -21,10 +22,28 @@ from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
 AUTH_PROVIDER_TYPE = "legacy_api_password"
 CONF_API_PASSWORD = "api_password"

-CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
+_CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
     {vol.Required(CONF_API_PASSWORD): cv.string}, extra=vol.PREVENT_EXTRA
 )


+def _create_repair_and_validate(config: dict[str, Any]) -> dict[str, Any]:
+    async_create_issue(
+        async_get_hass(),
+        "auth",
+        "deprecated_legacy_api_password",
+        breaks_in_ha_version="2024.6.0",
+        is_fixable=False,
+        severity=IssueSeverity.WARNING,
+        translation_key="deprecated_legacy_api_password",
+    )
+
+    return _CONFIG_SCHEMA(config)  # type: ignore[no-any-return]
+
+
+CONFIG_SCHEMA = _create_repair_and_validate
+
+
 LEGACY_USER_NAME = "Legacy API password user"

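The trick here is that a voluptuous schema is just a callable taking a config dict and returning the validated dict, so any function with that shape can stand in for CONFIG_SCHEMA and perform a side effect (here, registering a repair issue) before delegating to the real schema. A self-contained sketch of the pattern outside Home Assistant (names are illustrative):

import voluptuous as vol

_SCHEMA = vol.Schema({vol.Required("api_password"): str})


def _warn_and_validate(config: dict) -> dict:
    """Validate config, emitting a deprecation warning as a side effect."""
    print("api_password is deprecated")  # stands in for async_create_issue(...)
    return _SCHEMA(config)


# Callers that expect a schema only ever call it, so a plain function works too.
CONFIG_SCHEMA = _warn_and_validate
CONFIG_SCHEMA({"api_password": "hunter2"})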
@ -22,6 +22,7 @@ from homeassistant.core import callback
|
|||||||
from homeassistant.data_entry_flow import FlowResult
|
from homeassistant.data_entry_flow import FlowResult
|
||||||
from homeassistant.exceptions import HomeAssistantError
|
from homeassistant.exceptions import HomeAssistantError
|
||||||
import homeassistant.helpers.config_validation as cv
|
import homeassistant.helpers.config_validation as cv
|
||||||
|
from homeassistant.helpers.network import is_cloud_connection
|
||||||
|
|
||||||
from .. import InvalidAuthError
|
from .. import InvalidAuthError
|
||||||
from ..models import Credentials, RefreshToken, UserMeta
|
from ..models import Credentials, RefreshToken, UserMeta
|
||||||
@ -192,11 +193,8 @@ class TrustedNetworksAuthProvider(AuthProvider):
|
|||||||
if any(ip_addr in trusted_proxy for trusted_proxy in self.trusted_proxies):
|
if any(ip_addr in trusted_proxy for trusted_proxy in self.trusted_proxies):
|
||||||
raise InvalidAuthError("Can't allow access from a proxy server")
|
raise InvalidAuthError("Can't allow access from a proxy server")
|
||||||
|
|
||||||
if "cloud" in self.hass.config.components:
|
if is_cloud_connection(self.hass):
|
||||||
from hass_nabucasa import remote # pylint: disable=import-outside-toplevel
|
raise InvalidAuthError("Can't allow access from Home Assistant Cloud")
|
||||||
|
|
||||||
if remote.is_cloud_request.get():
|
|
||||||
raise InvalidAuthError("Can't allow access from Home Assistant Cloud")
|
|
||||||
|
|
||||||
@callback
|
@callback
|
||||||
def async_validate_refresh_token(
|
def async_validate_refresh_token(
|
||||||
@@ -41,6 +41,7 @@ from .setup import (
     DATA_SETUP,
     DATA_SETUP_STARTED,
     DATA_SETUP_TIME,
+    async_notify_setup_error,
     async_set_domains_to_be_loaded,
     async_setup_component,
 )
@@ -292,7 +293,8 @@ async def async_from_config_dict(
     try:
         await conf_util.async_process_ha_core_config(hass, core_config)
     except vol.Invalid as config_err:
-        conf_util.async_log_exception(config_err, "homeassistant", core_config, hass)
+        conf_util.async_log_schema_error(config_err, core.DOMAIN, core_config, hass)
+        async_notify_setup_error(hass, core.DOMAIN)
         return None
     except HomeAssistantError:
         _LOGGER.error(
@@ -398,7 +400,7 @@ def async_enable_logging(
     logging.getLogger("httpx").setLevel(logging.WARNING)
 
     sys.excepthook = lambda *args: logging.getLogger(None).exception(
-        "Uncaught exception", exc_info=args  # type: ignore[arg-type]
+        "Uncaught exception", exc_info=args
     )
     threading.excepthook = lambda args: logging.getLogger(None).exception(
         "Uncaught thread exception",
@@ -1,5 +1,5 @@
 {
   "domain": "eq3",
   "name": "eQ-3",
-  "integrations": ["eq3btsmart", "maxcube"]
+  "integrations": ["maxcube"]
 }
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
   "quality_scale": "platinum",
-  "requirements": ["accuweather==2.1.0"]
+  "requirements": ["accuweather==2.1.1"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/adax",
   "iot_class": "local_polling",
   "loggers": ["adax", "adax_local"],
-  "requirements": ["adax==0.3.0", "Adax-local==0.1.5"]
+  "requirements": ["adax==0.4.0", "Adax-local==0.1.5"]
 }
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "local_polling",
   "loggers": ["adguardhome"],
-  "requirements": ["adguardhome==0.6.2"]
+  "requirements": ["adguardhome==0.6.3"]
 }
@@ -22,20 +22,13 @@ SCAN_INTERVAL = timedelta(seconds=300)
 PARALLEL_UPDATES = 4
 
 
-@dataclass
-class AdGuardHomeEntityDescriptionMixin:
-    """Mixin for required keys."""
+@dataclass(kw_only=True)
+class AdGuardHomeEntityDescription(SensorEntityDescription):
+    """Describes AdGuard Home sensor entity."""
 
     value_fn: Callable[[AdGuardHome], Coroutine[Any, Any, int | float]]
 
 
-@dataclass
-class AdGuardHomeEntityDescription(
-    SensorEntityDescription, AdGuardHomeEntityDescriptionMixin
-):
-    """Describes AdGuard Home sensor entity."""
-
-
 SENSORS: tuple[AdGuardHomeEntityDescription, ...] = (
     AdGuardHomeEntityDescription(
         key="dns_queries",
@@ -10,6 +10,9 @@
           "username": "[%key:common::config_flow::data::username%]",
           "ssl": "[%key:common::config_flow::data::ssl%]",
           "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
+        },
+        "data_description": {
+          "host": "The hostname or IP address of the device running your AdGuard Home."
         }
       },
       "hassio_confirm": {
@@ -21,22 +21,15 @@ SCAN_INTERVAL = timedelta(seconds=10)
 PARALLEL_UPDATES = 1
 
 
-@dataclass
-class AdGuardHomeSwitchEntityDescriptionMixin:
-    """Mixin for required keys."""
+@dataclass(kw_only=True)
+class AdGuardHomeSwitchEntityDescription(SwitchEntityDescription):
+    """Describes AdGuard Home switch entity."""
 
     is_on_fn: Callable[[AdGuardHome], Callable[[], Coroutine[Any, Any, bool]]]
     turn_on_fn: Callable[[AdGuardHome], Callable[[], Coroutine[Any, Any, None]]]
     turn_off_fn: Callable[[AdGuardHome], Callable[[], Coroutine[Any, Any, None]]]
 
 
-@dataclass
-class AdGuardHomeSwitchEntityDescription(
-    SwitchEntityDescription, AdGuardHomeSwitchEntityDescriptionMixin
-):
-    """Describes AdGuard Home switch entity."""
-
-
 SWITCHES: tuple[AdGuardHomeSwitchEntityDescription, ...] = (
     AdGuardHomeSwitchEntityDescription(
         key="protection",
@@ -122,6 +122,13 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
         if self._ac.get(ADVANTAGE_AIR_AUTOFAN):
             self._attr_fan_modes += [FAN_AUTO]
 
+    @property
+    def current_temperature(self) -> float | None:
+        """Return the selected zones current temperature."""
+        if self._myzone:
+            return self._myzone["measuredTemp"]
+        return None
+
     @property
     def target_temperature(self) -> float | None:
         """Return the current target temperature."""
@@ -1,9 +1,8 @@
 """The AEMET OpenData component."""
 
-import asyncio
 import logging
 
-from aemet_opendata.exceptions import TownNotFound
+from aemet_opendata.exceptions import AemetError, TownNotFound
 from aemet_opendata.interface import AEMET, ConnectionOptions
 
 from homeassistant.config_entries import ConfigEntry
@@ -39,8 +38,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     except TownNotFound as err:
         _LOGGER.error(err)
         return False
-    except asyncio.TimeoutError as err:
-        raise ConfigEntryNotReady("AEMET OpenData API timed out") from err
+    except AemetError as err:
+        raise ConfigEntryNotReady(err) from err
 
     weather_coordinator = WeatherUpdateCoordinator(hass, aemet)
     await weather_coordinator.async_config_entry_first_refresh()
@@ -12,6 +12,18 @@ from aemet_opendata.const import (
     AOD_COND_RAINY,
     AOD_COND_SNOWY,
     AOD_COND_SUNNY,
+    AOD_CONDITION,
+    AOD_FORECAST_DAILY,
+    AOD_FORECAST_HOURLY,
+    AOD_PRECIPITATION,
+    AOD_PRECIPITATION_PROBABILITY,
+    AOD_TEMP,
+    AOD_TEMP_MAX,
+    AOD_TEMP_MIN,
+    AOD_TIMESTAMP,
+    AOD_WIND_DIRECTION,
+    AOD_WIND_SPEED,
+    AOD_WIND_SPEED_MAX,
 )
 
 from homeassistant.components.weather import (
@@ -25,6 +37,15 @@ from homeassistant.components.weather import (
     ATTR_CONDITION_RAINY,
     ATTR_CONDITION_SNOWY,
     ATTR_CONDITION_SUNNY,
+    ATTR_FORECAST_CONDITION,
+    ATTR_FORECAST_NATIVE_PRECIPITATION,
+    ATTR_FORECAST_NATIVE_TEMP,
+    ATTR_FORECAST_NATIVE_TEMP_LOW,
+    ATTR_FORECAST_NATIVE_WIND_GUST_SPEED,
+    ATTR_FORECAST_NATIVE_WIND_SPEED,
+    ATTR_FORECAST_PRECIPITATION_PROBABILITY,
+    ATTR_FORECAST_TIME,
+    ATTR_FORECAST_WIND_BEARING,
 )
 from homeassistant.const import Platform
 
@@ -122,3 +143,30 @@ FORECAST_MODE_ATTR_API = {
     FORECAST_MODE_DAILY: ATTR_API_FORECAST_DAILY,
     FORECAST_MODE_HOURLY: ATTR_API_FORECAST_HOURLY,
 }
+
+FORECAST_MAP = {
+    AOD_FORECAST_DAILY: {
+        AOD_CONDITION: ATTR_FORECAST_CONDITION,
+        AOD_PRECIPITATION_PROBABILITY: ATTR_FORECAST_PRECIPITATION_PROBABILITY,
+        AOD_TEMP_MAX: ATTR_FORECAST_NATIVE_TEMP,
+        AOD_TEMP_MIN: ATTR_FORECAST_NATIVE_TEMP_LOW,
+        AOD_TIMESTAMP: ATTR_FORECAST_TIME,
+        AOD_WIND_DIRECTION: ATTR_FORECAST_WIND_BEARING,
+        AOD_WIND_SPEED: ATTR_FORECAST_NATIVE_WIND_SPEED,
+    },
+    AOD_FORECAST_HOURLY: {
+        AOD_CONDITION: ATTR_FORECAST_CONDITION,
+        AOD_PRECIPITATION_PROBABILITY: ATTR_FORECAST_PRECIPITATION_PROBABILITY,
+        AOD_PRECIPITATION: ATTR_FORECAST_NATIVE_PRECIPITATION,
+        AOD_TEMP: ATTR_FORECAST_NATIVE_TEMP,
+        AOD_TIMESTAMP: ATTR_FORECAST_TIME,
+        AOD_WIND_DIRECTION: ATTR_FORECAST_WIND_BEARING,
+        AOD_WIND_SPEED_MAX: ATTR_FORECAST_NATIVE_WIND_GUST_SPEED,
+        AOD_WIND_SPEED: ATTR_FORECAST_NATIVE_WIND_SPEED,
+    },
+}
+
+WEATHER_FORECAST_MODES = {
+    AOD_FORECAST_DAILY: "daily",
+    AOD_FORECAST_HOURLY: "hourly",
+}
23  homeassistant/components/aemet/entity.py  Normal file
@@ -0,0 +1,23 @@
+"""Entity classes for the AEMET OpenData integration."""
+from __future__ import annotations
+
+from typing import Any
+
+from aemet_opendata.helpers import dict_nested_value
+
+from homeassistant.components.weather import Forecast
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
+
+from .weather_update_coordinator import WeatherUpdateCoordinator
+
+
+class AemetEntity(CoordinatorEntity[WeatherUpdateCoordinator]):
+    """Define an AEMET entity."""
+
+    def get_aemet_forecast(self, forecast_mode: str) -> list[Forecast]:
+        """Return AEMET entity forecast by mode."""
+        return self.coordinator.data["forecast"][forecast_mode]
+
+    def get_aemet_value(self, keys: list[str]) -> Any:
+        """Return AEMET entity value by keys."""
+        return dict_nested_value(self.coordinator.data["lib"], keys)
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aemet",
   "iot_class": "cloud_polling",
   "loggers": ["aemet_opendata"],
-  "requirements": ["AEMET-OpenData==0.4.5"]
+  "requirements": ["AEMET-OpenData==0.4.6"]
 }
@ -1,16 +1,19 @@
|
|||||||
"""Support for the AEMET OpenData service."""
|
"""Support for the AEMET OpenData service."""
|
||||||
from typing import cast
|
|
||||||
|
from aemet_opendata.const import (
|
||||||
|
AOD_CONDITION,
|
||||||
|
AOD_FORECAST_DAILY,
|
||||||
|
AOD_FORECAST_HOURLY,
|
||||||
|
AOD_HUMIDITY,
|
||||||
|
AOD_PRESSURE,
|
||||||
|
AOD_TEMP,
|
||||||
|
AOD_WEATHER,
|
||||||
|
AOD_WIND_DIRECTION,
|
||||||
|
AOD_WIND_SPEED,
|
||||||
|
AOD_WIND_SPEED_MAX,
|
||||||
|
)
|
||||||
|
|
||||||
from homeassistant.components.weather import (
|
from homeassistant.components.weather import (
|
||||||
ATTR_FORECAST_CONDITION,
|
|
||||||
ATTR_FORECAST_NATIVE_PRECIPITATION,
|
|
||||||
ATTR_FORECAST_NATIVE_TEMP,
|
|
||||||
ATTR_FORECAST_NATIVE_TEMP_LOW,
|
|
||||||
ATTR_FORECAST_NATIVE_WIND_GUST_SPEED,
|
|
||||||
ATTR_FORECAST_NATIVE_WIND_SPEED,
|
|
||||||
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
|
|
||||||
ATTR_FORECAST_TIME,
|
|
||||||
ATTR_FORECAST_WIND_BEARING,
|
|
||||||
DOMAIN as WEATHER_DOMAIN,
|
DOMAIN as WEATHER_DOMAIN,
|
||||||
Forecast,
|
Forecast,
|
||||||
SingleCoordinatorWeatherEntity,
|
SingleCoordinatorWeatherEntity,
|
||||||
@ -28,55 +31,16 @@ from homeassistant.helpers import entity_registry as er
|
|||||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||||
|
|
||||||
from .const import (
|
from .const import (
|
||||||
ATTR_API_CONDITION,
|
|
||||||
ATTR_API_FORECAST_CONDITION,
|
|
||||||
ATTR_API_FORECAST_PRECIPITATION,
|
|
||||||
ATTR_API_FORECAST_PRECIPITATION_PROBABILITY,
|
|
||||||
ATTR_API_FORECAST_TEMP,
|
|
||||||
ATTR_API_FORECAST_TEMP_LOW,
|
|
||||||
ATTR_API_FORECAST_TIME,
|
|
||||||
ATTR_API_FORECAST_WIND_BEARING,
|
|
||||||
ATTR_API_FORECAST_WIND_MAX_SPEED,
|
|
||||||
ATTR_API_FORECAST_WIND_SPEED,
|
|
||||||
ATTR_API_HUMIDITY,
|
|
||||||
ATTR_API_PRESSURE,
|
|
||||||
ATTR_API_TEMPERATURE,
|
|
||||||
ATTR_API_WIND_BEARING,
|
|
||||||
ATTR_API_WIND_MAX_SPEED,
|
|
||||||
ATTR_API_WIND_SPEED,
|
|
||||||
ATTRIBUTION,
|
ATTRIBUTION,
|
||||||
|
CONDITIONS_MAP,
|
||||||
DOMAIN,
|
DOMAIN,
|
||||||
ENTRY_NAME,
|
ENTRY_NAME,
|
||||||
ENTRY_WEATHER_COORDINATOR,
|
ENTRY_WEATHER_COORDINATOR,
|
||||||
FORECAST_MODE_ATTR_API,
|
WEATHER_FORECAST_MODES,
|
||||||
FORECAST_MODE_DAILY,
|
|
||||||
FORECAST_MODE_HOURLY,
|
|
||||||
FORECAST_MODES,
|
|
||||||
)
|
)
|
||||||
|
from .entity import AemetEntity
|
||||||
from .weather_update_coordinator import WeatherUpdateCoordinator
|
from .weather_update_coordinator import WeatherUpdateCoordinator
|
||||||
|
|
||||||
FORECAST_MAP = {
|
|
||||||
FORECAST_MODE_DAILY: {
|
|
||||||
ATTR_API_FORECAST_CONDITION: ATTR_FORECAST_CONDITION,
|
|
||||||
ATTR_API_FORECAST_PRECIPITATION_PROBABILITY: ATTR_FORECAST_PRECIPITATION_PROBABILITY,
|
|
||||||
ATTR_API_FORECAST_TEMP_LOW: ATTR_FORECAST_NATIVE_TEMP_LOW,
|
|
||||||
ATTR_API_FORECAST_TEMP: ATTR_FORECAST_NATIVE_TEMP,
|
|
||||||
ATTR_API_FORECAST_TIME: ATTR_FORECAST_TIME,
|
|
||||||
ATTR_API_FORECAST_WIND_BEARING: ATTR_FORECAST_WIND_BEARING,
|
|
||||||
ATTR_API_FORECAST_WIND_SPEED: ATTR_FORECAST_NATIVE_WIND_SPEED,
|
|
||||||
},
|
|
||||||
FORECAST_MODE_HOURLY: {
|
|
||||||
ATTR_API_FORECAST_CONDITION: ATTR_FORECAST_CONDITION,
|
|
||||||
ATTR_API_FORECAST_PRECIPITATION_PROBABILITY: ATTR_FORECAST_PRECIPITATION_PROBABILITY,
|
|
||||||
ATTR_API_FORECAST_PRECIPITATION: ATTR_FORECAST_NATIVE_PRECIPITATION,
|
|
||||||
ATTR_API_FORECAST_TEMP: ATTR_FORECAST_NATIVE_TEMP,
|
|
||||||
ATTR_API_FORECAST_TIME: ATTR_FORECAST_TIME,
|
|
||||||
ATTR_API_FORECAST_WIND_BEARING: ATTR_FORECAST_WIND_BEARING,
|
|
||||||
ATTR_API_FORECAST_WIND_MAX_SPEED: ATTR_FORECAST_NATIVE_WIND_GUST_SPEED,
|
|
||||||
ATTR_API_FORECAST_WIND_SPEED: ATTR_FORECAST_NATIVE_WIND_SPEED,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
async def async_setup_entry(
|
async def async_setup_entry(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
@ -95,11 +59,11 @@ async def async_setup_entry(
|
|||||||
if entity_registry.async_get_entity_id(
|
if entity_registry.async_get_entity_id(
|
||||||
WEATHER_DOMAIN,
|
WEATHER_DOMAIN,
|
||||||
DOMAIN,
|
DOMAIN,
|
||||||
f"{config_entry.unique_id} {FORECAST_MODE_HOURLY}",
|
f"{config_entry.unique_id} {WEATHER_FORECAST_MODES[AOD_FORECAST_HOURLY]}",
|
||||||
):
|
):
|
||||||
for mode in FORECAST_MODES:
|
for mode, mode_id in WEATHER_FORECAST_MODES.items():
|
||||||
name = f"{domain_data[ENTRY_NAME]} {mode}"
|
name = f"{domain_data[ENTRY_NAME]} {mode_id}"
|
||||||
unique_id = f"{config_entry.unique_id} {mode}"
|
unique_id = f"{config_entry.unique_id} {mode_id}"
|
||||||
entities.append(AemetWeather(name, unique_id, weather_coordinator, mode))
|
entities.append(AemetWeather(name, unique_id, weather_coordinator, mode))
|
||||||
else:
|
else:
|
||||||
entities.append(
|
entities.append(
|
||||||
@ -107,15 +71,18 @@ async def async_setup_entry(
|
|||||||
domain_data[ENTRY_NAME],
|
domain_data[ENTRY_NAME],
|
||||||
config_entry.unique_id,
|
config_entry.unique_id,
|
||||||
weather_coordinator,
|
weather_coordinator,
|
||||||
FORECAST_MODE_DAILY,
|
AOD_FORECAST_DAILY,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
async_add_entities(entities, False)
|
async_add_entities(entities, False)
|
||||||
|
|
||||||
|
|
||||||
class AemetWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordinator]):
|
class AemetWeather(
|
||||||
"""Implementation of an AEMET OpenData sensor."""
|
AemetEntity,
|
||||||
|
SingleCoordinatorWeatherEntity[WeatherUpdateCoordinator],
|
||||||
|
):
|
||||||
|
"""Implementation of an AEMET OpenData weather."""
|
||||||
|
|
||||||
_attr_attribution = ATTRIBUTION
|
_attr_attribution = ATTRIBUTION
|
||||||
_attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
|
_attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
|
||||||
@ -137,7 +104,7 @@ class AemetWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordinator]):
|
|||||||
super().__init__(coordinator)
|
super().__init__(coordinator)
|
||||||
self._forecast_mode = forecast_mode
|
self._forecast_mode = forecast_mode
|
||||||
self._attr_entity_registry_enabled_default = (
|
self._attr_entity_registry_enabled_default = (
|
||||||
self._forecast_mode == FORECAST_MODE_DAILY
|
self._forecast_mode == AOD_FORECAST_DAILY
|
||||||
)
|
)
|
||||||
self._attr_name = name
|
self._attr_name = name
|
||||||
self._attr_unique_id = unique_id
|
self._attr_unique_id = unique_id
|
||||||
@ -145,61 +112,50 @@ class AemetWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordinator]):
|
|||||||
@property
|
@property
|
||||||
def condition(self):
|
def condition(self):
|
||||||
"""Return the current condition."""
|
"""Return the current condition."""
|
||||||
return self.coordinator.data[ATTR_API_CONDITION]
|
cond = self.get_aemet_value([AOD_WEATHER, AOD_CONDITION])
|
||||||
|
return CONDITIONS_MAP.get(cond)
|
||||||
def _forecast(self, forecast_mode: str) -> list[Forecast]:
|
|
||||||
"""Return the forecast array."""
|
|
||||||
forecasts = self.coordinator.data[FORECAST_MODE_ATTR_API[forecast_mode]]
|
|
||||||
forecast_map = FORECAST_MAP[forecast_mode]
|
|
||||||
return cast(
|
|
||||||
list[Forecast],
|
|
||||||
[
|
|
||||||
{ha_key: forecast[api_key] for api_key, ha_key in forecast_map.items()}
|
|
||||||
for forecast in forecasts
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def forecast(self) -> list[Forecast]:
|
def forecast(self) -> list[Forecast]:
|
||||||
"""Return the forecast array."""
|
"""Return the forecast array."""
|
||||||
return self._forecast(self._forecast_mode)
|
return self.get_aemet_forecast(self._forecast_mode)
|
||||||
|
|
||||||
@callback
|
@callback
|
||||||
def _async_forecast_daily(self) -> list[Forecast]:
|
def _async_forecast_daily(self) -> list[Forecast]:
|
||||||
"""Return the daily forecast in native units."""
|
"""Return the daily forecast in native units."""
|
||||||
return self._forecast(FORECAST_MODE_DAILY)
|
return self.get_aemet_forecast(AOD_FORECAST_DAILY)
|
||||||
|
|
||||||
@callback
|
@callback
|
||||||
def _async_forecast_hourly(self) -> list[Forecast]:
|
def _async_forecast_hourly(self) -> list[Forecast]:
|
||||||
"""Return the hourly forecast in native units."""
|
"""Return the hourly forecast in native units."""
|
||||||
return self._forecast(FORECAST_MODE_HOURLY)
|
return self.get_aemet_forecast(AOD_FORECAST_HOURLY)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def humidity(self):
|
def humidity(self):
|
||||||
"""Return the humidity."""
|
"""Return the humidity."""
|
||||||
return self.coordinator.data[ATTR_API_HUMIDITY]
|
return self.get_aemet_value([AOD_WEATHER, AOD_HUMIDITY])
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def native_pressure(self):
|
def native_pressure(self):
|
||||||
"""Return the pressure."""
|
"""Return the pressure."""
|
||||||
return self.coordinator.data[ATTR_API_PRESSURE]
|
return self.get_aemet_value([AOD_WEATHER, AOD_PRESSURE])
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def native_temperature(self):
|
def native_temperature(self):
|
||||||
"""Return the temperature."""
|
"""Return the temperature."""
|
||||||
return self.coordinator.data[ATTR_API_TEMPERATURE]
|
return self.get_aemet_value([AOD_WEATHER, AOD_TEMP])
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def wind_bearing(self):
|
def wind_bearing(self):
|
||||||
"""Return the wind bearing."""
|
"""Return the wind bearing."""
|
||||||
return self.coordinator.data[ATTR_API_WIND_BEARING]
|
return self.get_aemet_value([AOD_WEATHER, AOD_WIND_DIRECTION])
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def native_wind_gust_speed(self):
|
def native_wind_gust_speed(self):
|
||||||
"""Return the wind gust speed in native units."""
|
"""Return the wind gust speed in native units."""
|
||||||
return self.coordinator.data[ATTR_API_WIND_MAX_SPEED]
|
return self.get_aemet_value([AOD_WEATHER, AOD_WIND_SPEED_MAX])
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def native_wind_speed(self):
|
def native_wind_speed(self):
|
||||||
"""Return the wind speed."""
|
"""Return the wind speed."""
|
||||||
return self.coordinator.data[ATTR_API_WIND_SPEED]
|
return self.get_aemet_value([AOD_WEATHER, AOD_WIND_SPEED])
|
||||||
@@ -4,7 +4,7 @@ from __future__ import annotations
 from asyncio import timeout
 from datetime import timedelta
 import logging
-from typing import Any, Final
+from typing import Any, Final, cast
 
 from aemet_opendata.const import (
     AEMET_ATTR_DATE,
@@ -31,17 +31,24 @@ from aemet_opendata.const import (
     AEMET_ATTR_TEMPERATURE,
     AEMET_ATTR_WIND,
     AEMET_ATTR_WIND_GUST,
+    AOD_CONDITION,
+    AOD_FORECAST,
+    AOD_FORECAST_DAILY,
+    AOD_FORECAST_HOURLY,
+    AOD_TOWN,
     ATTR_DATA,
 )
 from aemet_opendata.exceptions import AemetError
 from aemet_opendata.forecast import ForecastValue
 from aemet_opendata.helpers import (
+    dict_nested_value,
     get_forecast_day_value,
     get_forecast_hour_value,
     get_forecast_interval_value,
 )
 from aemet_opendata.interface import AEMET
 
+from homeassistant.components.weather import Forecast
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 from homeassistant.util import dt as dt_util
@@ -79,6 +86,7 @@ from .const import (
     ATTR_API_WIND_SPEED,
     CONDITIONS_MAP,
     DOMAIN,
+    FORECAST_MAP,
 )
 
 _LOGGER = logging.getLogger(__name__)
@@ -239,6 +247,12 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
             weather_response, now
         )
 
+        data = self.aemet.data()
+        forecasts: list[dict[str, Forecast]] = {
+            AOD_FORECAST_DAILY: self.aemet_forecast(data, AOD_FORECAST_DAILY),
+            AOD_FORECAST_HOURLY: self.aemet_forecast(data, AOD_FORECAST_HOURLY),
+        }
+
         return {
             ATTR_API_CONDITION: condition,
             ATTR_API_FORECAST_DAILY: forecast_daily,
@@ -261,8 +275,29 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
             ATTR_API_WIND_BEARING: wind_bearing,
             ATTR_API_WIND_MAX_SPEED: wind_max_speed,
             ATTR_API_WIND_SPEED: wind_speed,
+            "forecast": forecasts,
+            "lib": data,
         }
 
+    def aemet_forecast(
+        self,
+        data: dict[str, Any],
+        forecast_mode: str,
+    ) -> list[Forecast]:
+        """Return the forecast array."""
+        forecasts = dict_nested_value(data, [AOD_TOWN, forecast_mode, AOD_FORECAST])
+        forecast_map = FORECAST_MAP[forecast_mode]
+        forecast_list: list[dict[str, Any]] = []
+        for forecast in forecasts:
+            cur_forecast: dict[str, Any] = {}
+            for api_key, ha_key in forecast_map.items():
+                value = forecast[api_key]
+                if api_key == AOD_CONDITION:
+                    value = CONDITIONS_MAP.get(value)
+                cur_forecast[ha_key] = value
+            forecast_list += [cur_forecast]
+        return cast(list[Forecast], forecast_list)
+
     def _get_daily_forecast_from_weather_response(self, weather_response, now):
         if weather_response.daily:
             parse = False
@@ -6,6 +6,9 @@
         "data": {
           "host": "[%key:common::config_flow::data::host%]",
           "port": "[%key:common::config_flow::data::port%]"
+        },
+        "data_description": {
+          "host": "The IP address of the Agent DVR server."
         }
       }
     },
@@ -11,6 +11,7 @@ from homeassistant.const import (
     Platform,
 )
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import device_registry as dr, entity_registry as er
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 
 from .const import DOMAIN
@@ -50,6 +51,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
 
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
 
+    # Clean up unused device entries with no entities
+    device_registry = dr.async_get(hass)
+    entity_registry = er.async_get(hass)
+
+    device_entries = dr.async_entries_for_config_entry(
+        device_registry, config_entry_id=entry.entry_id
+    )
+    for dev in device_entries:
+        dev_entities = er.async_entries_for_device(
+            entity_registry, dev.id, include_disabled_entities=True
+        )
+        if not dev_entities:
+            device_registry.async_remove_device(dev.id)
+
     return True
 
 
@@ -148,13 +148,14 @@ class AirNowSensor(CoordinatorEntity[AirNowDataUpdateCoordinator], SensorEntity)
     ) -> None:
         """Initialize."""
         super().__init__(coordinator)
+
+        _device_id = f"{coordinator.latitude}-{coordinator.longitude}"
+
         self.entity_description = description
-        self._attr_unique_id = (
-            f"{coordinator.latitude}-{coordinator.longitude}-{description.key.lower()}"
-        )
+        self._attr_unique_id = f"{_device_id}-{description.key.lower()}"
         self._attr_device_info = DeviceInfo(
             entry_type=DeviceEntryType.SERVICE,
-            identifiers={(DOMAIN, self._attr_unique_id)},
+            identifiers={(DOMAIN, _device_id)},
             manufacturer=DEFAULT_NAME,
             name=DEFAULT_NAME,
         )
@@ -3,7 +3,6 @@ from typing import Final
 
 DOMAIN: Final = "airq"
 MANUFACTURER: Final = "CorantGmbH"
-TARGET_ROUTE: Final = "average"
 CONCENTRATION_GRAMS_PER_CUBIC_METER: Final = "g/m³"
 ACTIVITY_BECQUEREL_PER_CUBIC_METER: Final = "Bq/m³"
 UPDATE_INTERVAL: float = 10.0
@@ -13,7 +13,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
 
-from .const import DOMAIN, MANUFACTURER, TARGET_ROUTE, UPDATE_INTERVAL
+from .const import DOMAIN, MANUFACTURER, UPDATE_INTERVAL
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -56,6 +56,4 @@ class AirQCoordinator(DataUpdateCoordinator):
                     hw_version=info["hw_version"],
                 )
             )
-        data = await self.airq.get(TARGET_ROUTE)
-        return self.airq.drop_uncertainties_from_data(data)
+        return await self.airq.get_latest_data()
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["aioairq"],
-  "requirements": ["aioairq==0.2.4"]
+  "requirements": ["aioairq==0.3.1"]
 }
@@ -12,6 +12,9 @@
         "title": "Set up your AirTouch 4 connection details.",
         "data": {
           "host": "[%key:common::config_flow::data::host%]"
+        },
+        "data_description": {
+          "host": "The hostname or IP address of your AirTouch controller."
         }
       }
     }
@@ -12,6 +12,9 @@
         "data": {
           "ip_address": "[%key:common::config_flow::data::host%]",
           "password": "[%key:common::config_flow::data::password%]"
+        },
+        "data_description": {
+          "ip_address": "The hostname or IP address of your AirVisual Pro device."
        }
       }
     },
@@ -9,7 +9,6 @@ from aioairzone.const import (
     AZD_BATTERY_LOW,
     AZD_ERRORS,
     AZD_FLOOR_DEMAND,
-    AZD_NAME,
     AZD_PROBLEMS,
     AZD_SYSTEMS,
     AZD_ZONES,
@@ -45,7 +44,6 @@ SYSTEM_BINARY_SENSOR_TYPES: Final[tuple[AirzoneBinarySensorEntityDescription, ..
         device_class=BinarySensorDeviceClass.PROBLEM,
         entity_category=EntityCategory.DIAGNOSTIC,
         key=AZD_PROBLEMS,
-        name="Problem",
     ),
 )
 
@@ -53,17 +51,16 @@ ZONE_BINARY_SENSOR_TYPES: Final[tuple[AirzoneBinarySensorEntityDescription, ...]
     AirzoneBinarySensorEntityDescription(
         device_class=BinarySensorDeviceClass.RUNNING,
         key=AZD_AIR_DEMAND,
-        name="Air Demand",
+        translation_key="air_demand",
     ),
     AirzoneBinarySensorEntityDescription(
         device_class=BinarySensorDeviceClass.BATTERY,
         key=AZD_BATTERY_LOW,
-        name="Battery Low",
     ),
     AirzoneBinarySensorEntityDescription(
         device_class=BinarySensorDeviceClass.RUNNING,
         key=AZD_FLOOR_DEMAND,
-        name="Floor Demand",
+        translation_key="floor_demand",
     ),
     AirzoneBinarySensorEntityDescription(
         attributes={
@@ -72,7 +69,6 @@ ZONE_BINARY_SENSOR_TYPES: Final[tuple[AirzoneBinarySensorEntityDescription, ...]
         device_class=BinarySensorDeviceClass.PROBLEM,
         entity_category=EntityCategory.DIAGNOSTIC,
         key=AZD_PROBLEMS,
-        name="Problem",
     ),
 )
 
@@ -149,7 +145,6 @@ class AirzoneSystemBinarySensor(AirzoneSystemEntity, AirzoneBinarySensor):
     ) -> None:
         """Initialize."""
         super().__init__(coordinator, entry, system_data)
-        self._attr_name = f"System {system_id} {description.name}"
         self._attr_unique_id = f"{self._attr_unique_id}_{system_id}_{description.key}"
         self.entity_description = description
         self._async_update_attrs()
@@ -169,7 +164,6 @@ class AirzoneZoneBinarySensor(AirzoneZoneEntity, AirzoneBinarySensor):
         """Initialize."""
         super().__init__(coordinator, entry, system_zone_id, zone_data)
 
-        self._attr_name = f"{zone_data[AZD_NAME]} {description.name}"
         self._attr_unique_id = (
             f"{self._attr_unique_id}_{system_zone_id}_{description.key}"
         )
@@ -19,7 +19,6 @@ from aioairzone.const import (
     AZD_MASTER,
     AZD_MODE,
     AZD_MODES,
-    AZD_NAME,
     AZD_ON,
     AZD_SPEED,
     AZD_SPEEDS,
@@ -32,6 +31,7 @@ from aioairzone.const import (
 )
 
 from homeassistant.components.climate import (
+    ATTR_HVAC_MODE,
     ATTR_TARGET_TEMP_HIGH,
     ATTR_TARGET_TEMP_LOW,
     FAN_AUTO,
@@ -114,6 +114,7 @@ async def async_setup_entry(
 class AirzoneClimate(AirzoneZoneEntity, ClimateEntity):
     """Define an Airzone sensor."""
 
+    _attr_name = None
     _speeds: dict[int, str] = {}
     _speeds_reverse: dict[str, int] = {}
 
@@ -127,7 +128,6 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity):
         """Initialize Airzone climate entity."""
         super().__init__(coordinator, entry, system_zone_id, zone_data)
 
-        self._attr_name = f"{zone_data[AZD_NAME]}"
         self._attr_unique_id = f"{self._attr_unique_id}_{system_zone_id}"
         self._attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
         self._attr_target_temperature_step = API_TEMPERATURE_STEP
@@ -209,7 +209,9 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity):
         await self._async_update_hvac_params(params)
 
         if slave_raise:
-            raise HomeAssistantError(f"Mode can't be changed on slave zone {self.name}")
+            raise HomeAssistantError(
+                f"Mode can't be changed on slave zone {self.entity_id}"
+            )
 
     async def async_set_temperature(self, **kwargs: Any) -> None:
         """Set new target temperature."""
@@ -221,6 +223,9 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity):
             params[API_HEAT_SET_POINT] = kwargs[ATTR_TARGET_TEMP_LOW]
         await self._async_update_hvac_params(params)
 
+        if ATTR_HVAC_MODE in kwargs:
+            await self.async_set_hvac_mode(kwargs[ATTR_HVAC_MODE])
+
     @callback
     def _handle_coordinator_update(self) -> None:
         """Update attributes when the coordinator updates."""
@@ -39,6 +39,8 @@ _LOGGER = logging.getLogger(__name__)
 class AirzoneEntity(CoordinatorEntity[AirzoneUpdateCoordinator]):
     """Define an Airzone entity."""
 
+    _attr_has_entity_name = True
+
     def get_airzone_value(self, key: str) -> Any:
         """Return Airzone entity value by key."""
         raise NotImplementedError()
@@ -62,7 +64,7 @@ class AirzoneSystemEntity(AirzoneEntity):
             identifiers={(DOMAIN, f"{entry.entry_id}_{self.system_id}")},
             manufacturer=MANUFACTURER,
             model=self.get_airzone_value(AZD_MODEL),
-            name=self.get_airzone_value(AZD_FULL_NAME),
+            name=f"System {self.system_id}",
             sw_version=self.get_airzone_value(AZD_FIRMWARE),
             via_device=(DOMAIN, f"{entry.entry_id}_ws"),
         )
@@ -116,9 +118,7 @@ class AirzoneHotWaterEntity(AirzoneEntity):
         try:
             await self.coordinator.airzone.set_dhw_parameters(_params)
         except AirzoneError as error:
-            raise HomeAssistantError(
-                f"Failed to set dhw {self.name}: {error}"
-            ) from error
+            raise HomeAssistantError(f"Failed to set DHW: {error}") from error
 
         self.coordinator.async_set_updated_data(self.coordinator.airzone.data())
 
@@ -172,7 +172,7 @@ class AirzoneZoneEntity(AirzoneEntity):
             identifiers={(DOMAIN, f"{entry.entry_id}_{system_zone_id}")},
             manufacturer=MANUFACTURER,
             model=self.get_airzone_value(AZD_THERMOSTAT_MODEL),
-            name=f"Airzone [{system_zone_id}] {zone_data[AZD_NAME]}",
+            name=zone_data[AZD_NAME],
             sw_version=self.get_airzone_value(AZD_THERMOSTAT_FW),
             via_device=(DOMAIN, f"{entry.entry_id}_{self.system_id}"),
         )
@@ -203,7 +203,7 @@ class AirzoneZoneEntity(AirzoneEntity):
             await self.coordinator.airzone.set_hvac_parameters(_params)
         except AirzoneError as error:
             raise HomeAssistantError(
-                f"Failed to set zone {self.name}: {error}"
+                f"Failed to set zone {self.entity_id}: {error}"
             ) from error
 
         self.coordinator.async_set_updated_data(self.coordinator.airzone.data())
@@ -11,7 +11,6 @@ from aioairzone.const import (
     API_SLEEP,
     AZD_COLD_ANGLE,
     AZD_HEAT_ANGLE,
-    AZD_NAME,
     AZD_SLEEP,
     AZD_ZONES,
 )
@@ -60,7 +59,6 @@ ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
         api_param=API_COLD_ANGLE,
         entity_category=EntityCategory.CONFIG,
         key=AZD_COLD_ANGLE,
-        name="Cold Angle",
         options=list(GRILLE_ANGLE_DICT),
         options_dict=GRILLE_ANGLE_DICT,
         translation_key="grille_angles",
@@ -69,16 +67,14 @@ ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
         api_param=API_HEAT_ANGLE,
         entity_category=EntityCategory.CONFIG,
         key=AZD_HEAT_ANGLE,
-        name="Heat Angle",
         options=list(GRILLE_ANGLE_DICT),
         options_dict=GRILLE_ANGLE_DICT,
-        translation_key="grille_angles",
+        translation_key="heat_angles",
     ),
     AirzoneSelectDescription(
         api_param=API_SLEEP,
         entity_category=EntityCategory.CONFIG,
         key=AZD_SLEEP,
-        name="Sleep",
         options=list(SLEEP_DICT),
         options_dict=SLEEP_DICT,
         translation_key="sleep_times",
@@ -146,7 +142,6 @@ class AirzoneZoneSelect(AirzoneZoneEntity, AirzoneBaseSelect):
         """Initialize."""
         super().__init__(coordinator, entry, system_zone_id, zone_data)
 
-        self._attr_name = f"{zone_data[AZD_NAME]} {description.name}"
         self._attr_unique_id = (
             f"{self._attr_unique_id}_{system_zone_id}_{description.key}"
         )
@@ -6,7 +6,6 @@ from typing import Any, Final
 from aioairzone.const import (
     AZD_HOT_WATER,
     AZD_HUMIDITY,
-    AZD_NAME,
     AZD_TEMP,
     AZD_TEMP_UNIT,
     AZD_WEBSERVER,
@@ -54,7 +53,7 @@ WEBSERVER_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
         entity_category=EntityCategory.DIAGNOSTIC,
         entity_registry_enabled_default=False,
         key=AZD_WIFI_RSSI,
-        name="RSSI",
+        translation_key="rssi",
         native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
         state_class=SensorStateClass.MEASUREMENT,
     ),
@@ -64,14 +63,12 @@ ZONE_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
     SensorEntityDescription(
         device_class=SensorDeviceClass.TEMPERATURE,
         key=AZD_TEMP,
-        name="Temperature",
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
         state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         device_class=SensorDeviceClass.HUMIDITY,
         key=AZD_HUMIDITY,
-        name="Humidity",
         native_unit_of_measurement=PERCENTAGE,
         state_class=SensorStateClass.MEASUREMENT,
     ),
@@ -144,8 +141,6 @@ class AirzoneSensor(AirzoneEntity, SensorEntity):
 class AirzoneHotWaterSensor(AirzoneHotWaterEntity, AirzoneSensor):
     """Define an Airzone Hot Water sensor."""
 
-    _attr_has_entity_name = True
-
     def __init__(
         self,
         coordinator: AirzoneUpdateCoordinator,
@@ -176,7 +171,6 @@ class AirzoneWebServerSensor(AirzoneWebServerEntity, AirzoneSensor):
     ) -> None:
         """Initialize."""
         super().__init__(coordinator, entry)
-        self._attr_name = f"WebServer {description.name}"
         self._attr_unique_id = f"{self._attr_unique_id}_ws_{description.key}"
         self.entity_description = description
         self._async_update_attrs()
@@ -196,7 +190,6 @@ class AirzoneZoneSensor(AirzoneZoneEntity, AirzoneSensor):
         """Initialize."""
         super().__init__(coordinator, entry, system_zone_id, zone_data)
 
-        self._attr_name = f"{zone_data[AZD_NAME]} {description.name}"
         self._attr_unique_id = (
             f"{self._attr_unique_id}_{system_zone_id}_{description.key}"
         )
@@ -25,8 +25,17 @@
     }
   },
   "entity": {
+    "binary_sensor": {
+      "air_demand": {
+        "name": "Air demand"
+      },
+      "floor_demand": {
+        "name": "Floor demand"
+      }
+    },
     "select": {
       "grille_angles": {
+        "name": "Cold angle",
         "state": {
           "90deg": "90°",
           "50deg": "50°",
@@ -34,7 +43,17 @@
           "40deg": "40°"
         }
       },
+      "heat_angles": {
+        "name": "Heat angle",
+        "state": {
+          "90deg": "[%key:component::airzone::entity::select::grille_angles::state::90deg%]",
+          "50deg": "[%key:component::airzone::entity::select::grille_angles::state::50deg%]",
+          "45deg": "[%key:component::airzone::entity::select::grille_angles::state::45deg%]",
+          "40deg": "[%key:component::airzone::entity::select::grille_angles::state::40deg%]"
+        }
+      },
       "sleep_times": {
+        "name": "Sleep",
         "state": {
           "off": "[%key:common::state::off%]",
           "30m": "30 minutes",
@@ -42,6 +61,11 @@
           "90m": "90 minutes"
         }
       }
+    },
+    "sensor": {
+      "rssi": {
+        "name": "RSSI"
+      }
     }
   }
 }
@@ -9,7 +9,6 @@ from aioairzone.const import (
     API_ACS_POWER_MODE,
     API_ACS_SET_POINT,
     AZD_HOT_WATER,
-    AZD_NAME,
     AZD_OPERATION,
     AZD_OPERATIONS,
     AZD_TEMP,
@@ -67,6 +66,7 @@ async def async_setup_entry(
 class AirzoneWaterHeater(AirzoneHotWaterEntity, WaterHeaterEntity):
     """Define an Airzone Water Heater."""
 
+    _attr_name = None
     _attr_supported_features = (
         WaterHeaterEntityFeature.TARGET_TEMPERATURE
         | WaterHeaterEntityFeature.ON_OFF
@@ -81,7 +81,6 @@ class AirzoneWaterHeater(AirzoneHotWaterEntity, WaterHeaterEntity):
         """Initialize Airzone water heater entity."""
         super().__init__(coordinator, entry)
 
-        self._attr_name = self.get_airzone_value(AZD_NAME)
         self._attr_unique_id = f"{self._attr_unique_id}_dhw"
         self._attr_operation_list = [
             OPERATION_LIB_TO_HASS[operation]
@@ -46,7 +46,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
 
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
+
     if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
-        hass.data[DOMAIN].pop(entry.entry_id)
+        coordinator: AirzoneUpdateCoordinator = hass.data[DOMAIN].pop(entry.entry_id)
+        await coordinator.airzone.logout()
 
     return unload_ok
@@ -159,8 +159,6 @@ class AirzoneBinarySensor(AirzoneEntity, BinarySensorEntity):
 class AirzoneAidooBinarySensor(AirzoneAidooEntity, AirzoneBinarySensor):
     """Define an Airzone Cloud Aidoo binary sensor."""
 
-    _attr_has_entity_name = True
-
     def __init__(
         self,
         coordinator: AirzoneUpdateCoordinator,
@@ -180,8 +178,6 @@ class AirzoneAidooBinarySensor(AirzoneAidooEntity, AirzoneBinarySensor):
 class AirzoneSystemBinarySensor(AirzoneSystemEntity, AirzoneBinarySensor):
     """Define an Airzone Cloud System binary sensor."""
 
-    _attr_has_entity_name = True
-
     def __init__(
         self,
         coordinator: AirzoneUpdateCoordinator,
@@ -201,8 +197,6 @@ class AirzoneSystemBinarySensor(AirzoneSystemEntity, AirzoneBinarySensor):
 class AirzoneZoneBinarySensor(AirzoneZoneEntity, AirzoneBinarySensor):
     """Define an Airzone Cloud Zone binary sensor."""
 
-    _attr_has_entity_name = True
-
     def __init__(
         self,
         coordinator: AirzoneUpdateCoordinator,
@@ -32,6 +32,7 @@ from aioairzone_cloud.const import (
 )
 
 from homeassistant.components.climate import (
+    ATTR_HVAC_MODE,
     ClimateEntity,
     ClimateEntityFeature,
     HVACAction,
@@ -142,7 +143,6 @@ async def async_setup_entry(
 class AirzoneClimate(AirzoneEntity, ClimateEntity):
     """Define an Airzone Cloud climate."""
 
-    _attr_has_entity_name = True
     _attr_name = None
     _attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
     _attr_temperature_unit = UnitOfTemperature.CELSIUS
@@ -205,6 +205,9 @@ class AirzoneDeviceClimate(AirzoneClimate):
         }
         await self._async_update_params(params)
 
+        if ATTR_HVAC_MODE in kwargs:
+            await self.async_set_hvac_mode(kwargs[ATTR_HVAC_MODE])
+
 
 class AirzoneDeviceGroupClimate(AirzoneClimate):
     """Define an Airzone Cloud DeviceGroup base class."""
@@ -239,6 +242,9 @@ class AirzoneDeviceGroupClimate(AirzoneClimate):
         }
         await self._async_update_params(params)
 
+        if ATTR_HVAC_MODE in kwargs:
+            await self.async_set_hvac_mode(kwargs[ATTR_HVAC_MODE])
+
     async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
         """Set hvac mode."""
         params: dict[str, Any] = {
@@ -387,4 +393,6 @@ class AirzoneZoneClimate(AirzoneZoneEntity, AirzoneDeviceClimate):
         await self._async_update_params(params)
 
         if slave_raise:
-            raise HomeAssistantError(f"Mode can't be changed on slave zone {self.name}")
+            raise HomeAssistantError(
+                f"Mode can't be changed on slave zone {self.entity_id}"
+            )
@@ -34,6 +34,8 @@ _LOGGER = logging.getLogger(__name__)
 class AirzoneEntity(CoordinatorEntity[AirzoneUpdateCoordinator], ABC):
     """Define an Airzone Cloud entity."""

+    _attr_has_entity_name = True
+
     @property
     def available(self) -> bool:
         """Return Airzone Cloud entity availability."""
@@ -78,14 +80,14 @@ class AirzoneAidooEntity(AirzoneEntity):

     async def _async_update_params(self, params: dict[str, Any]) -> None:
         """Send Aidoo parameters to Cloud API."""
-        _LOGGER.debug("aidoo=%s: update_params=%s", self.name, params)
+        _LOGGER.debug("aidoo=%s: update_params=%s", self.entity_id, params)
         try:
             await self.coordinator.airzone.api_set_aidoo_id_params(
                 self.aidoo_id, params
             )
         except AirzoneCloudError as error:
             raise HomeAssistantError(
-                f"Failed to set {self.name} params: {error}"
+                f"Failed to set {self.entity_id} params: {error}"
             ) from error

         self.coordinator.async_set_updated_data(self.coordinator.airzone.data())
@@ -120,14 +122,14 @@ class AirzoneGroupEntity(AirzoneEntity):

     async def _async_update_params(self, params: dict[str, Any]) -> None:
         """Send Group parameters to Cloud API."""
-        _LOGGER.debug("group=%s: update_params=%s", self.name, params)
+        _LOGGER.debug("group=%s: update_params=%s", self.entity_id, params)
         try:
             await self.coordinator.airzone.api_set_group_id_params(
                 self.group_id, params
             )
         except AirzoneCloudError as error:
             raise HomeAssistantError(
-                f"Failed to set {self.name} params: {error}"
+                f"Failed to set {self.entity_id} params: {error}"
             ) from error

         self.coordinator.async_set_updated_data(self.coordinator.airzone.data())
@@ -162,14 +164,18 @@ class AirzoneInstallationEntity(AirzoneEntity):

     async def _async_update_params(self, params: dict[str, Any]) -> None:
         """Send Installation parameters to Cloud API."""
-        _LOGGER.debug("installation=%s: update_params=%s", self.name, params)
+        _LOGGER.debug(
+            "installation=%s: update_params=%s",
+            self.entity_id,
+            params,
+        )
         try:
             await self.coordinator.airzone.api_set_installation_id_params(
                 self.inst_id, params
             )
         except AirzoneCloudError as error:
             raise HomeAssistantError(
-                f"Failed to set {self.name} params: {error}"
+                f"Failed to set {self.entity_id} params: {error}"
             ) from error

         self.coordinator.async_set_updated_data(self.coordinator.airzone.data())
@@ -265,12 +271,12 @@ class AirzoneZoneEntity(AirzoneEntity):

     async def _async_update_params(self, params: dict[str, Any]) -> None:
         """Send Zone parameters to Cloud API."""
-        _LOGGER.debug("zone=%s: update_params=%s", self.name, params)
+        _LOGGER.debug("zone=%s: update_params=%s", self.entity_id, params)
         try:
             await self.coordinator.airzone.api_set_zone_id_params(self.zone_id, params)
         except AirzoneCloudError as error:
             raise HomeAssistantError(
-                f"Failed to set {self.name} params: {error}"
+                f"Failed to set {self.entity_id} params: {error}"
             ) from error

         self.coordinator.async_set_updated_data(self.coordinator.airzone.data())
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
   "iot_class": "cloud_polling",
   "loggers": ["aioairzone_cloud"],
-  "requirements": ["aioairzone-cloud==0.3.5"]
+  "requirements": ["aioairzone-cloud==0.3.6"]
 }
@@ -141,8 +141,6 @@ class AirzoneSensor(AirzoneEntity, SensorEntity):
 class AirzoneAidooSensor(AirzoneAidooEntity, AirzoneSensor):
     """Define an Airzone Cloud Aidoo sensor."""

-    _attr_has_entity_name = True
-
     def __init__(
         self,
         coordinator: AirzoneUpdateCoordinator,
@@ -162,8 +160,6 @@ class AirzoneAidooSensor(AirzoneAidooEntity, AirzoneSensor):
 class AirzoneWebServerSensor(AirzoneWebServerEntity, AirzoneSensor):
     """Define an Airzone Cloud WebServer sensor."""

-    _attr_has_entity_name = True
-
     def __init__(
         self,
         coordinator: AirzoneUpdateCoordinator,
@@ -183,8 +179,6 @@ class AirzoneWebServerSensor(AirzoneWebServerEntity, AirzoneSensor):
 class AirzoneZoneSensor(AirzoneZoneEntity, AirzoneSensor):
     """Define an Airzone Cloud Zone sensor."""

-    _attr_has_entity_name = True
-
     def __init__(
         self,
         coordinator: AirzoneUpdateCoordinator,
@@ -14,6 +14,10 @@
         "port": "[%key:common::config_flow::data::port%]",
         "device_baudrate": "Device Baud Rate",
         "device_path": "Device Path"
+      },
+      "data_description": {
+        "host": "The hostname or IP address of the AlarmDecoder device that is connected to your alarm panel.",
+        "port": "The port on which AlarmDecoder is accessible (for example, 10000)"
       }
     }
   },
@@ -7,6 +7,9 @@
         "port": "[%key:common::config_flow::data::port%]",
         "username": "[%key:common::config_flow::data::username%]",
         "password": "[%key:common::config_flow::data::password%]"
+      },
+      "data_description": {
+        "host": "The IP address of the device running the Android IP Webcam app. The IP address is shown in the app once you start the server."
       }
     }
   },
@@ -1,44 +1,34 @@
 """Support for APCUPSd via its Network Information Server (NIS)."""
 from __future__ import annotations

-from datetime import timedelta
 import logging
-from typing import Any, Final
+from typing import Final

-from apcaccess import status
-
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_HOST, CONF_PORT, Platform
 from homeassistant.core import HomeAssistant
 import homeassistant.helpers.config_validation as cv
-from homeassistant.helpers.device_registry import DeviceInfo
-from homeassistant.util import Throttle
+from .const import DOMAIN
+from .coordinator import APCUPSdCoordinator

 _LOGGER = logging.getLogger(__name__)

-DOMAIN: Final = "apcupsd"
-VALUE_ONLINE: Final = 8
 PLATFORMS: Final = (Platform.BINARY_SENSOR, Platform.SENSOR)
-MIN_TIME_BETWEEN_UPDATES: Final = timedelta(seconds=60)

 CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)


 async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
     """Use config values to set up a function enabling status retrieval."""
-    data_service = APCUPSdData(
-        config_entry.data[CONF_HOST], config_entry.data[CONF_PORT]
-    )
+    host, port = config_entry.data[CONF_HOST], config_entry.data[CONF_PORT]
+    coordinator = APCUPSdCoordinator(hass, host, port)

-    try:
-        await hass.async_add_executor_job(data_service.update)
-    except OSError as ex:
-        _LOGGER.error("Failure while testing APCUPSd status retrieval: %s", ex)
-        return False
+    await coordinator.async_config_entry_first_refresh()

-    # Store the data service object.
+    # Store the coordinator for later uses.
     hass.data.setdefault(DOMAIN, {})
-    hass.data[DOMAIN][config_entry.entry_id] = data_service
+    hass.data[DOMAIN][config_entry.entry_id] = coordinator

     # Forward the config entries to the supported platforms.
     await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
@@ -51,66 +41,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     if unload_ok and DOMAIN in hass.data:
         hass.data[DOMAIN].pop(entry.entry_id)
     return unload_ok
-
-
-class APCUPSdData:
-    """Stores the data retrieved from APCUPSd.
-
-    For each entity to use, acts as the single point responsible for fetching
-    updates from the server.
-    """
-
-    def __init__(self, host: str, port: int) -> None:
-        """Initialize the data object."""
-        self._host = host
-        self._port = port
-        self.status: dict[str, str] = {}
-
-    @property
-    def name(self) -> str | None:
-        """Return the name of the UPS, if available."""
-        return self.status.get("UPSNAME")
-
-    @property
-    def model(self) -> str | None:
-        """Return the model of the UPS, if available."""
-        # Different UPS models may report slightly different keys for model, here we
-        # try them all.
-        for model_key in ("APCMODEL", "MODEL"):
-            if model_key in self.status:
-                return self.status[model_key]
-        return None
-
-    @property
-    def serial_no(self) -> str | None:
-        """Return the unique serial number of the UPS, if available."""
-        return self.status.get("SERIALNO")
-
-    @property
-    def statflag(self) -> str | None:
-        """Return the STATFLAG indicating the status of the UPS, if available."""
-        return self.status.get("STATFLAG")
-
-    @property
-    def device_info(self) -> DeviceInfo | None:
-        """Return the DeviceInfo of this APC UPS for the sensors, if serial number is available."""
-        if self.serial_no is None:
-            return None
-
-        return DeviceInfo(
-            identifiers={(DOMAIN, self.serial_no)},
-            model=self.model,
-            manufacturer="APC",
-            name=self.name if self.name is not None else "APC UPS",
-            hw_version=self.status.get("FIRMWARE"),
-            sw_version=self.status.get("VERSION"),
-        )
-
-    @Throttle(MIN_TIME_BETWEEN_UPDATES)
-    def update(self, **kwargs: Any) -> None:
-        """Fetch the latest status from APCUPSd.
-
-        Note that the result dict uses upper case for each resource, where our
-        integration uses lower cases as keys internally.
-        """
-        self.status = status.parse(status.get(host=self._host, port=self._port))
@@ -2,6 +2,7 @@
 from __future__ import annotations

 import logging
+from typing import Final

 from homeassistant.components.binary_sensor import (
     BinarySensorEntity,
@@ -10,8 +11,9 @@ from homeassistant.components.binary_sensor import (
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from . import DOMAIN, VALUE_ONLINE, APCUPSdData
+from . import DOMAIN, APCUPSdCoordinator

 _LOGGER = logging.getLogger(__name__)
 _DESCRIPTION = BinarySensorEntityDescription(
@@ -19,6 +21,8 @@ _DESCRIPTION = BinarySensorEntityDescription(
     name="UPS Online Status",
     icon="mdi:heart",
 )
+# The bit in STATFLAG that indicates the online status of the APC UPS.
+_VALUE_ONLINE_MASK: Final = 0b1000


 async def async_setup_entry(
@@ -27,50 +31,36 @@ async def async_setup_entry(
     async_add_entities: AddEntitiesCallback,
 ) -> None:
     """Set up an APCUPSd Online Status binary sensor."""
-    data_service: APCUPSdData = hass.data[DOMAIN][config_entry.entry_id]
+    coordinator: APCUPSdCoordinator = hass.data[DOMAIN][config_entry.entry_id]

     # Do not create the binary sensor if APCUPSd does not provide STATFLAG field for us
     # to determine the online status.
-    if data_service.statflag is None:
+    if _DESCRIPTION.key.upper() not in coordinator.data:
         return

-    async_add_entities(
-        [OnlineStatus(data_service, _DESCRIPTION)],
-        update_before_add=True,
-    )
+    async_add_entities([OnlineStatus(coordinator, _DESCRIPTION)])


-class OnlineStatus(BinarySensorEntity):
+class OnlineStatus(CoordinatorEntity[APCUPSdCoordinator], BinarySensorEntity):
     """Representation of a UPS online status."""

     def __init__(
         self,
-        data_service: APCUPSdData,
+        coordinator: APCUPSdCoordinator,
         description: BinarySensorEntityDescription,
     ) -> None:
         """Initialize the APCUPSd binary device."""
+        super().__init__(coordinator, context=description.key.upper())
+
         # Set up unique id and device info if serial number is available.
-        if (serial_no := data_service.serial_no) is not None:
+        if (serial_no := coordinator.ups_serial_no) is not None:
             self._attr_unique_id = f"{serial_no}_{description.key}"
-            self._attr_device_info = data_service.device_info

         self.entity_description = description
-        self._data_service = data_service
+        self._attr_device_info = coordinator.device_info

-    def update(self) -> None:
-        """Get the status report from APCUPSd and set this entity's state."""
-        try:
-            self._data_service.update()
-        except OSError as ex:
-            if self._attr_available:
-                self._attr_available = False
-                _LOGGER.exception("Got exception while fetching state: %s", ex)
-            return
-
-        self._attr_available = True
+    @property
+    def is_on(self) -> bool | None:
+        """Returns true if the UPS is online."""
+        # Check if ONLINE bit is set in STATFLAG.
         key = self.entity_description.key.upper()
-        if key not in self._data_service.status:
-            self._attr_is_on = None
-            return
-
-        self._attr_is_on = int(self._data_service.status[key], 16) & VALUE_ONLINE > 0
+        return int(self.coordinator.data[key], 16) & _VALUE_ONLINE_MASK != 0
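
The new `is_on` property above relies on apcupsd reporting STATFLAG as a hexadecimal bit field in which bit 3 (0x8) marks the UPS as online. A minimal, self-contained sketch of that bit test; the sample STATFLAG strings are made up for illustration and are not real apcupsd output:

# Sketch of the STATFLAG online-bit test used by the new is_on property.
# The sample values below are illustrative, not captured from a UPS.
_VALUE_ONLINE_MASK = 0b1000


def is_online(statflag: str) -> bool:
    """Return True when the ONLINE bit is set in a hex STATFLAG string."""
    return int(statflag, 16) & _VALUE_ONLINE_MASK != 0


print(is_online("0x08"))  # True: online bit set
print(is_online("0x10"))  # False: online bit clear (e.g. running on battery)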
@@ -1,6 +1,7 @@
 """Config flow for APCUPSd integration."""
 from __future__ import annotations

+import asyncio
 from typing import Any

 import voluptuous as vol
@@ -10,8 +11,9 @@ from homeassistant.const import CONF_HOST, CONF_PORT
 from homeassistant.data_entry_flow import FlowResult
 from homeassistant.helpers import selector
 import homeassistant.helpers.config_validation as cv
+from homeassistant.helpers.update_coordinator import UpdateFailed

-from . import DOMAIN, APCUPSdData
+from . import DOMAIN, APCUPSdCoordinator

 _PORT_SELECTOR = vol.All(
     selector.NumberSelector(
@@ -43,36 +45,37 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
         if user_input is None:
             return self.async_show_form(step_id="user", data_schema=_SCHEMA)

+        host, port = user_input[CONF_HOST], user_input[CONF_PORT]
+
         # Abort if an entry with same host and port is present.
-        self._async_abort_entries_match(
-            {CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT]}
-        )
+        self._async_abort_entries_match({CONF_HOST: host, CONF_PORT: port})

         # Test the connection to the host and get the current status for serial number.
-        data_service = APCUPSdData(user_input[CONF_HOST], user_input[CONF_PORT])
-        try:
-            await self.hass.async_add_executor_job(data_service.update)
-        except OSError:
+        coordinator = APCUPSdCoordinator(self.hass, host, port)
+        await coordinator.async_request_refresh()
+        await self.hass.async_block_till_done()
+        if isinstance(coordinator.last_exception, (UpdateFailed, asyncio.TimeoutError)):
             errors = {"base": "cannot_connect"}
             return self.async_show_form(
                 step_id="user", data_schema=_SCHEMA, errors=errors
             )

-        if not data_service.status:
+        if not coordinator.data:
             return self.async_abort(reason="no_status")

         # We _try_ to use the serial number of the UPS as the unique id since this field
         # is not guaranteed to exist on all APC UPS models.
-        await self.async_set_unique_id(data_service.serial_no)
+        await self.async_set_unique_id(coordinator.ups_serial_no)
         self._abort_if_unique_id_configured()

         title = "APC UPS"
-        if data_service.name is not None:
-            title = data_service.name
-        elif data_service.model is not None:
-            title = data_service.model
-        elif data_service.serial_no is not None:
-            title = data_service.serial_no
+        if coordinator.ups_name is not None:
+            title = coordinator.ups_name
+        elif coordinator.ups_model is not None:
+            title = coordinator.ups_model
+        elif coordinator.ups_serial_no is not None:
+            title = coordinator.ups_serial_no

         return self.async_create_entry(
             title=title,

homeassistant/components/apcupsd/const.py (new file, 4 lines)
@@ -0,0 +1,4 @@
+"""Constants for APCUPSd component."""
+from typing import Final
+
+DOMAIN: Final = "apcupsd"
homeassistant/components/apcupsd/coordinator.py (new file, 102 lines)
@@ -0,0 +1,102 @@
+"""Support for APCUPSd via its Network Information Server (NIS)."""
+from __future__ import annotations
+
+import asyncio
+from collections import OrderedDict
+from datetime import timedelta
+import logging
+from typing import Final
+
+from apcaccess import status
+
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.debounce import Debouncer
+from homeassistant.helpers.device_registry import DeviceInfo
+from homeassistant.helpers.update_coordinator import (
+    REQUEST_REFRESH_DEFAULT_IMMEDIATE,
+    DataUpdateCoordinator,
+    UpdateFailed,
+)
+
+from .const import DOMAIN
+
+_LOGGER = logging.getLogger(__name__)
+UPDATE_INTERVAL: Final = timedelta(seconds=60)
+REQUEST_REFRESH_COOLDOWN: Final = 5
+
+
+class APCUPSdCoordinator(DataUpdateCoordinator[OrderedDict[str, str]]):
+    """Store and coordinate the data retrieved from APCUPSd for all sensors.
+
+    For each entity to use, acts as the single point responsible for fetching
+    updates from the server.
+    """
+
+    def __init__(self, hass: HomeAssistant, host: str, port: int) -> None:
+        """Initialize the data object."""
+        super().__init__(
+            hass,
+            _LOGGER,
+            name=DOMAIN,
+            update_interval=UPDATE_INTERVAL,
+            request_refresh_debouncer=Debouncer(
+                hass,
+                _LOGGER,
+                cooldown=REQUEST_REFRESH_COOLDOWN,
+                immediate=REQUEST_REFRESH_DEFAULT_IMMEDIATE,
+            ),
+        )
+        self._host = host
+        self._port = port
+
+    @property
+    def ups_name(self) -> str | None:
+        """Return the name of the UPS, if available."""
+        return self.data.get("UPSNAME")
+
+    @property
+    def ups_model(self) -> str | None:
+        """Return the model of the UPS, if available."""
+        # Different UPS models may report slightly different keys for model, here we
+        # try them all.
+        for model_key in ("APCMODEL", "MODEL"):
+            if model_key in self.data:
+                return self.data[model_key]
+        return None
+
+    @property
+    def ups_serial_no(self) -> str | None:
+        """Return the unique serial number of the UPS, if available."""
+        return self.data.get("SERIALNO")
+
+    @property
+    def device_info(self) -> DeviceInfo | None:
+        """Return the DeviceInfo of this APC UPS, if serial number is available."""
+        if not self.ups_serial_no:
+            return None
+
+        return DeviceInfo(
+            identifiers={(DOMAIN, self.ups_serial_no)},
+            model=self.ups_model,
+            manufacturer="APC",
+            name=self.ups_name if self.ups_name else "APC UPS",
+            hw_version=self.data.get("FIRMWARE"),
+            sw_version=self.data.get("VERSION"),
+        )
+
+    async def _async_update_data(self) -> OrderedDict[str, str]:
+        """Fetch the latest status from APCUPSd.
+
+        Note that the result dict uses upper case for each resource, where our
+        integration uses lower cases as keys internally.
+        """
+
+        async with asyncio.timeout(10):
+            try:
+                raw = await self.hass.async_add_executor_job(
+                    status.get, self._host, self._port
+                )
+                result: OrderedDict[str, str] = status.parse(raw)
+                return result
+            except OSError as error:
+                raise UpdateFailed(error) from error
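
The coordinator above runs the blocking apcaccess query through hass.async_add_executor_job and bounds it with a 10-second timeout. A stripped-down sketch of that polling pattern in plain asyncio, where fetch_status and its sample data are stand-ins assumed for illustration rather than the real status.get()/status.parse() calls:

import asyncio
import time


def fetch_status(host: str, port: int) -> dict[str, str]:
    """Blocking stand-in for the NIS query done by apcaccess (illustrative only)."""
    time.sleep(0.1)  # pretend to talk to the apcupsd daemon
    return {"UPSNAME": "demo-ups", "STATFLAG": "0x08"}


async def poll_once(host: str = "localhost", port: int = 3551) -> dict[str, str]:
    """Run the blocking fetch in the default executor, bounded by a timeout."""
    loop = asyncio.get_running_loop()
    async with asyncio.timeout(10):  # asyncio.timeout requires Python 3.11+
        return await loop.run_in_executor(None, fetch_status, host, port)


print(asyncio.run(poll_once()))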
@@ -6,5 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/apcupsd",
   "iot_class": "local_polling",
   "loggers": ["apcaccess"],
+  "quality_scale": "silver",
   "requirements": ["apcaccess==0.0.13"]
 }
@@ -20,10 +20,11 @@ from homeassistant.const import (
     UnitOfTemperature,
     UnitOfTime,
 )
-from homeassistant.core import HomeAssistant
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from . import DOMAIN, APCUPSdData
+from . import DOMAIN, APCUPSdCoordinator

 _LOGGER = logging.getLogger(__name__)

@@ -452,11 +453,11 @@ async def async_setup_entry(
     async_add_entities: AddEntitiesCallback,
 ) -> None:
     """Set up the APCUPSd sensors from config entries."""
-    data_service: APCUPSdData = hass.data[DOMAIN][config_entry.entry_id]
+    coordinator: APCUPSdCoordinator = hass.data[DOMAIN][config_entry.entry_id]

-    # The resources from data service are in upper-case by default, but we use
-    # lower cases throughout this integration.
-    available_resources: set[str] = {k.lower() for k, _ in data_service.status.items()}
+    # The resource keys in the data dict collected in the coordinator is in upper-case
+    # by default, but we use lower cases throughout this integration.
+    available_resources: set[str] = {k.lower() for k, _ in coordinator.data.items()}

     entities = []
     for resource in available_resources:
@@ -464,9 +465,9 @@ async def async_setup_entry(
             _LOGGER.warning("Invalid resource from APCUPSd: %s", resource.upper())
             continue

-        entities.append(APCUPSdSensor(data_service, SENSORS[resource]))
+        entities.append(APCUPSdSensor(coordinator, SENSORS[resource]))

-    async_add_entities(entities, update_before_add=True)
+    async_add_entities(entities)


 def infer_unit(value: str) -> tuple[str, str | None]:
@@ -483,41 +484,36 @@ def infer_unit(value: str) -> tuple[str, str | None]:
     return value, None


-class APCUPSdSensor(SensorEntity):
+class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity):
     """Representation of a sensor entity for APCUPSd status values."""

     def __init__(
         self,
-        data_service: APCUPSdData,
+        coordinator: APCUPSdCoordinator,
         description: SensorEntityDescription,
     ) -> None:
         """Initialize the sensor."""
+        super().__init__(coordinator=coordinator, context=description.key.upper())
+
         # Set up unique id and device info if serial number is available.
-        if (serial_no := data_service.serial_no) is not None:
+        if (serial_no := coordinator.ups_serial_no) is not None:
             self._attr_unique_id = f"{serial_no}_{description.key}"
-            self._attr_device_info = data_service.device_info

         self.entity_description = description
-        self._data_service = data_service
+        self._attr_device_info = coordinator.device_info

-    def update(self) -> None:
-        """Get the latest status and use it to update our sensor state."""
-        try:
-            self._data_service.update()
-        except OSError as ex:
-            if self._attr_available:
-                self._attr_available = False
-                _LOGGER.exception("Got exception while fetching state: %s", ex)
-            return
+        # Initial update of attributes.
+        self._update_attrs()

-        self._attr_available = True
+    @callback
+    def _handle_coordinator_update(self) -> None:
+        """Handle updated data from the coordinator."""
+        self._update_attrs()
+        self.async_write_ha_state()
+
+    def _update_attrs(self) -> None:
+        """Update sensor attributes based on coordinator data."""
         key = self.entity_description.key.upper()
-        if key not in self._data_service.status:
-            self._attr_native_value = None
-            return
-
-        self._attr_native_value, inferred_unit = infer_unit(
-            self._data_service.status[key]
-        )
+        self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key])
         if not self.native_unit_of_measurement:
             self._attr_native_unit_of_measurement = inferred_unit
@@ -1,9 +1,11 @@
 """Rest API for Home Assistant."""
 import asyncio
-from asyncio import timeout
+from asyncio import shield, timeout
+from collections.abc import Collection
 from functools import lru_cache
 from http import HTTPStatus
 import logging
+from typing import Any

 from aiohttp import web
 from aiohttp.web_exceptions import HTTPBadRequest
@@ -16,6 +18,7 @@ from homeassistant.components.http import HomeAssistantView, require_admin
 from homeassistant.const import (
     CONTENT_TYPE_JSON,
     EVENT_HOMEASSISTANT_STOP,
+    EVENT_STATE_CHANGED,
     MATCH_ALL,
     URL_API,
     URL_API_COMPONENTS,
@@ -38,10 +41,12 @@ from homeassistant.exceptions import (
     Unauthorized,
 )
 from homeassistant.helpers import config_validation as cv, template
+from homeassistant.helpers.event import EventStateChangedData
 from homeassistant.helpers.json import json_dumps
 from homeassistant.helpers.service import async_get_all_descriptions
-from homeassistant.helpers.typing import ConfigType
+from homeassistant.helpers.typing import ConfigType, EventType
 from homeassistant.util.json import json_loads
+from homeassistant.util.read_only_dict import ReadOnlyDict

 _LOGGER = logging.getLogger(__name__)

@@ -57,6 +62,7 @@ ATTR_VERSION = "version"
 DOMAIN = "api"
 STREAM_PING_PAYLOAD = "ping"
 STREAM_PING_INTERVAL = 50  # seconds
+SERVICE_WAIT_TIMEOUT = 10

 CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)

@@ -211,7 +217,9 @@ class APIStatesView(HomeAssistantView):
             if entity_perm(state.entity_id, "read")
         )
         response = web.Response(
-            body=f'[{",".join(states)}]', content_type=CONTENT_TYPE_JSON
+            body=f'[{",".join(states)}]',
+            content_type=CONTENT_TYPE_JSON,
+            zlib_executor_size=32768,
         )
         response.enable_compression()
         return response
@@ -369,19 +377,30 @@ class APIDomainServicesView(HomeAssistantView):
         )

         context = self.context(request)
+        changed_states: list[ReadOnlyDict[str, Collection[Any]]] = []
+
+        @ha.callback
+        def _async_save_changed_entities(
+            event: EventType[EventStateChangedData],
+        ) -> None:
+            if event.context == context and (state := event.data["new_state"]):
+                changed_states.append(state.as_dict())
+
+        cancel_listen = hass.bus.async_listen(
+            EVENT_STATE_CHANGED, _async_save_changed_entities, run_immediately=True
+        )
+
         try:
-            await hass.services.async_call(
-                domain, service, data, blocking=True, context=context
+            # shield the service call from cancellation on connection drop
+            await shield(
+                hass.services.async_call(
+                    domain, service, data, blocking=True, context=context
+                )
             )
         except (vol.Invalid, ServiceNotFound) as ex:
             raise HTTPBadRequest() from ex
+        finally:
+            cancel_listen()

-        changed_states = []
-        for state in hass.states.async_all():
-            if state.context is context:
-                changed_states.append(state)
-
         return self.json(changed_states)
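
The service-call change above depends on asyncio.shield(): when the HTTP connection drops and the request handler is cancelled, the shielded service call keeps running to completion while the handler itself still observes the cancellation. A minimal sketch of that behaviour with plain asyncio tasks; the names are illustrative, not Home Assistant APIs:

import asyncio


async def slow_service_call() -> None:
    """Stand-in for a blocking service call that should not be interrupted."""
    await asyncio.sleep(0.2)
    print("service call finished even though the request was cancelled")


async def request_handler(call: asyncio.Task) -> None:
    # Cancelling this coroutine does not cancel the shielded inner task.
    await asyncio.shield(call)


async def main() -> None:
    call = asyncio.create_task(slow_service_call())
    handler = asyncio.create_task(request_handler(call))
    await asyncio.sleep(0.05)
    handler.cancel()  # simulate the client dropping the connection
    await asyncio.gather(handler, call, return_exceptions=True)


asyncio.run(main())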
@@ -9,7 +9,13 @@ from homeassistant.components import stt
 from homeassistant.core import Context, HomeAssistant
 from homeassistant.helpers.typing import ConfigType

-from .const import CONF_DEBUG_RECORDING_DIR, DATA_CONFIG, DATA_LAST_WAKE_UP, DOMAIN
+from .const import (
+    CONF_DEBUG_RECORDING_DIR,
+    DATA_CONFIG,
+    DATA_LAST_WAKE_UP,
+    DOMAIN,
+    EVENT_RECORDING,
+)
 from .error import PipelineNotFound
 from .pipeline import (
     AudioSettings,
@@ -40,6 +46,7 @@ __all__ = (
     "PipelineEventType",
     "PipelineNotFound",
     "WakeWordSettings",
+    "EVENT_RECORDING",
 )

 CONFIG_SCHEMA = vol.Schema(
@@ -11,3 +11,5 @@ CONF_DEBUG_RECORDING_DIR = "debug_recording_dir"

 DATA_LAST_WAKE_UP = f"{DOMAIN}.last_wake_up"
 DEFAULT_WAKE_WORD_COOLDOWN = 2  # seconds
+
+EVENT_RECORDING = f"{DOMAIN}_recording"

homeassistant/components/assist_pipeline/logbook.py (new file, 39 lines)
@@ -0,0 +1,39 @@
+"""Describe assist_pipeline logbook events."""
+from __future__ import annotations
+
+from collections.abc import Callable
+
+from homeassistant.components.logbook import LOGBOOK_ENTRY_MESSAGE, LOGBOOK_ENTRY_NAME
+from homeassistant.const import ATTR_DEVICE_ID
+from homeassistant.core import Event, HomeAssistant, callback
+import homeassistant.helpers.device_registry as dr
+
+from .const import DOMAIN, EVENT_RECORDING
+
+
+@callback
+def async_describe_events(
+    hass: HomeAssistant,
+    async_describe_event: Callable[[str, str, Callable[[Event], dict[str, str]]], None],
+) -> None:
+    """Describe logbook events."""
+    device_registry = dr.async_get(hass)
+
+    @callback
+    def async_describe_logbook_event(event: Event) -> dict[str, str]:
+        """Describe logbook event."""
+        device: dr.DeviceEntry | None = None
+        device_name: str = "Unknown device"
+
+        device = device_registry.devices[event.data[ATTR_DEVICE_ID]]
+        if device:
+            device_name = device.name_by_user or device.name or "Unknown device"
+
+        message = f"{device_name} captured an audio sample"
+
+        return {
+            LOGBOOK_ENTRY_NAME: device_name,
+            LOGBOOK_ENTRY_MESSAGE: message,
+        }
+
+    async_describe_event(DOMAIN, EVENT_RECORDING, async_describe_logbook_event)
@@ -320,7 +320,7 @@ class Pipeline:
     wake_word_entity: str | None
     wake_word_id: str | None

-    id: str = field(default_factory=ulid_util.ulid)
+    id: str = field(default_factory=ulid_util.ulid_now)

     @classmethod
     def from_json(cls, data: dict[str, Any]) -> Pipeline:
@@ -482,7 +482,7 @@ class PipelineRun:
     wake_word_settings: WakeWordSettings | None = None
     audio_settings: AudioSettings = field(default_factory=AudioSettings)

-    id: str = field(default_factory=ulid_util.ulid)
+    id: str = field(default_factory=ulid_util.ulid_now)
     stt_provider: stt.SpeechToTextEntity | stt.Provider = field(init=False, repr=False)
     tts_engine: str = field(init=False, repr=False)
     tts_options: dict | None = field(init=False, default=None)
@@ -503,6 +503,9 @@ class PipelineRun:
     audio_processor_buffer: AudioBuffer = field(init=False, repr=False)
     """Buffer used when splitting audio into chunks for audio processing"""

+    _device_id: str | None = None
+    """Optional device id set during run start."""
+
     def __post_init__(self) -> None:
         """Set language for pipeline."""
         self.language = self.pipeline.language or self.hass.config.language
@@ -554,7 +557,8 @@ class PipelineRun:

     def start(self, device_id: str | None) -> None:
         """Emit run start event."""
-        self._start_debug_recording_thread(device_id)
+        self._device_id = device_id
+        self._start_debug_recording_thread()

         data = {
             "pipeline": self.pipeline.id,
@@ -567,6 +571,9 @@ class PipelineRun:

     async def end(self) -> None:
         """Emit run end event."""
+        # Signal end of stream to listeners
+        self._capture_chunk(None)
+
         # Stop the recording thread before emitting run-end.
         # This ensures that files are properly closed if the event handler reads them.
         await self._stop_debug_recording_thread()
@@ -746,9 +753,7 @@ class PipelineRun:
             if self.abort_wake_word_detection:
                 raise WakeWordDetectionAborted

-            if self.debug_recording_queue is not None:
-                self.debug_recording_queue.put_nowait(chunk.audio)
-
+            self._capture_chunk(chunk.audio)
             yield chunk.audio, chunk.timestamp_ms

             # Wake-word-detection occurs *after* the wake word was actually
@@ -870,8 +875,7 @@ class PipelineRun:
         chunk_seconds = AUDIO_PROCESSOR_SAMPLES / sample_rate
         sent_vad_start = False
         async for chunk in audio_stream:
-            if self.debug_recording_queue is not None:
-                self.debug_recording_queue.put_nowait(chunk.audio)
+            self._capture_chunk(chunk.audio)

             if stt_vad is not None:
                 if not stt_vad.process(chunk_seconds, chunk.is_speech):
@@ -971,12 +975,16 @@ class PipelineRun:
         # pipeline.tts_engine can't be None or this function is not called
         engine = cast(str, self.pipeline.tts_engine)

-        tts_options = {}
+        tts_options: dict[str, Any] = {}
         if self.pipeline.tts_voice is not None:
             tts_options[tts.ATTR_VOICE] = self.pipeline.tts_voice

         if self.tts_audio_output is not None:
-            tts_options[tts.ATTR_AUDIO_OUTPUT] = self.tts_audio_output
+            tts_options[tts.ATTR_PREFERRED_FORMAT] = self.tts_audio_output
+            if self.tts_audio_output == "wav":
+                # 16 Khz, 16-bit mono
+                tts_options[tts.ATTR_PREFERRED_SAMPLE_RATE] = 16000
+                tts_options[tts.ATTR_PREFERRED_SAMPLE_CHANNELS] = 1

         try:
             options_supported = await tts.async_support_options(
@@ -1016,44 +1024,64 @@ class PipelineRun:
             )
         )

-        try:
-            # Synthesize audio and get URL
-            tts_media_id = tts_generate_media_source_id(
-                self.hass,
-                tts_input,
-                engine=self.tts_engine,
-                language=self.pipeline.tts_language,
-                options=self.tts_options,
-            )
-            tts_media = await media_source.async_resolve_media(
-                self.hass,
-                tts_media_id,
-                None,
-            )
-        except Exception as src_error:
-            _LOGGER.exception("Unexpected error during text-to-speech")
-            raise TextToSpeechError(
-                code="tts-failed",
-                message="Unexpected error during text-to-speech",
-            ) from src_error
+        if tts_input := tts_input.strip():
+            try:
+                # Synthesize audio and get URL
+                tts_media_id = tts_generate_media_source_id(
+                    self.hass,
+                    tts_input,
+                    engine=self.tts_engine,
+                    language=self.pipeline.tts_language,
+                    options=self.tts_options,
+                )
+                tts_media = await media_source.async_resolve_media(
+                    self.hass,
+                    tts_media_id,
+                    None,
+                )
+            except Exception as src_error:
+                _LOGGER.exception("Unexpected error during text-to-speech")
+                raise TextToSpeechError(
+                    code="tts-failed",
+                    message="Unexpected error during text-to-speech",
+                ) from src_error

-        _LOGGER.debug("TTS result %s", tts_media)
+            _LOGGER.debug("TTS result %s", tts_media)
+            tts_output = {
+                "media_id": tts_media_id,
+                **asdict(tts_media),
+            }
+        else:
+            tts_output = {}

         self.process_event(
-            PipelineEvent(
-                PipelineEventType.TTS_END,
-                {
-                    "tts_output": {
-                        "media_id": tts_media_id,
-                        **asdict(tts_media),
-                    }
-                },
-            )
+            PipelineEvent(PipelineEventType.TTS_END, {"tts_output": tts_output})
         )

         return tts_media.url

-    def _start_debug_recording_thread(self, device_id: str | None) -> None:
+    def _capture_chunk(self, audio_bytes: bytes | None) -> None:
+        """Forward audio chunk to various capturing mechanisms."""
+        if self.debug_recording_queue is not None:
+            # Forward to debug WAV file recording
+            self.debug_recording_queue.put_nowait(audio_bytes)
+
+        if self._device_id is None:
+            return
+
+        # Forward to device audio capture
+        pipeline_data: PipelineData = self.hass.data[DOMAIN]
+        audio_queue = pipeline_data.device_audio_queues.get(self._device_id)
+        if audio_queue is None:
+            return
+
+        try:
+            audio_queue.queue.put_nowait(audio_bytes)
+        except asyncio.QueueFull:
+            audio_queue.overflow = True
+            _LOGGER.warning("Audio queue full for device %s", self._device_id)
+
+    def _start_debug_recording_thread(self) -> None:
         """Start thread to record wake/stt audio if debug_recording_dir is set."""
         if self.debug_recording_thread is not None:
             # Already started
@@ -1064,7 +1092,7 @@ class PipelineRun:
         if debug_recording_dir := self.hass.data[DATA_CONFIG].get(
             CONF_DEBUG_RECORDING_DIR
         ):
-            if device_id is None:
+            if self._device_id is None:
                 # <debug_recording_dir>/<pipeline.name>/<run.id>
                 run_recording_dir = (
                     Path(debug_recording_dir)
@@ -1075,7 +1103,7 @@ class PipelineRun:
                 # <debug_recording_dir>/<device_id>/<pipeline.name>/<run.id>
                 run_recording_dir = (
                     Path(debug_recording_dir)
-                    / device_id
+                    / self._device_id
                    / self.pipeline.name
                    / str(time.monotonic_ns())
                )
@@ -1096,8 +1124,8 @@ class PipelineRun:
             # Not running
             return

-        # Signal thread to stop gracefully
-        self.debug_recording_queue.put(None)
+        # NOTE: Expecting a None to have been put in self.debug_recording_queue
+        # in self.end() to signal the thread to stop.

         # Wait until the thread has finished to ensure that files are fully written
         await self.hass.async_add_executor_job(self.debug_recording_thread.join)
@@ -1286,9 +1314,9 @@ class PipelineInput:
         if stt_audio_buffer:
             # Send audio in the buffer first to speech-to-text, then move on to stt_stream.
             # This is basically an async itertools.chain.
-            async def buffer_then_audio_stream() -> AsyncGenerator[
-                ProcessedAudioChunk, None
-            ]:
+            async def buffer_then_audio_stream() -> (
+                AsyncGenerator[ProcessedAudioChunk, None]
+            ):
                 # Buffered audio
                 for chunk in stt_audio_buffer:
                     yield chunk
@@ -1447,7 +1475,7 @@ class PipelineStorageCollection(
     @callback
     def _get_suggested_id(self, info: dict) -> str:
         """Suggest an ID based on the config."""
-        return ulid_util.ulid()
+        return ulid_util.ulid_now()

     async def _update_data(self, item: Pipeline, update_data: dict) -> Pipeline:
         """Return a new updated item."""
@@ -1628,6 +1656,20 @@ class PipelineRuns:
             pipeline_run.abort_wake_word_detection = True


+@dataclass
+class DeviceAudioQueue:
+    """Audio capture queue for a satellite device."""
+
+    queue: asyncio.Queue[bytes | None]
+    """Queue of audio chunks (None = stop signal)"""
+
+    id: str = field(default_factory=ulid_util.ulid_now)
+    """Unique id to ensure the correct audio queue is cleaned up in websocket API."""
+
+    overflow: bool = False
+    """Flag to be set if audio samples were dropped because the queue was full."""
+
+
 class PipelineData:
     """Store and debug data stored in hass.data."""

@@ -1637,6 +1679,7 @@ class PipelineData:
         self.pipeline_debug: dict[str, LimitedSizeDict[str, PipelineRunDebug]] = {}
         self.pipeline_devices: set[str] = set()
         self.pipeline_runs = PipelineRuns(pipeline_store)
+        self.device_audio_queues: dict[str, DeviceAudioQueue] = {}


 @dataclass
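
DeviceAudioQueue above pairs a bounded asyncio.Queue with an overflow flag: producers use a non-blocking put and, when the consumer falls behind, they drop the chunk and record the overflow instead of stalling the audio pipeline, with None marking end of stream. A small self-contained sketch of that pattern; the class and function names are illustrative stand-ins, not the integration's own:

import asyncio
from dataclasses import dataclass, field


@dataclass
class DemoAudioQueue:
    """Illustrative stand-in for a bounded device audio capture queue."""

    queue: asyncio.Queue[bytes | None] = field(
        default_factory=lambda: asyncio.Queue(maxsize=2)
    )
    overflow: bool = False


def capture_chunk(capture: DemoAudioQueue, chunk: bytes | None) -> None:
    """Non-blocking put; mark overflow instead of blocking the audio pipeline."""
    try:
        capture.queue.put_nowait(chunk)
    except asyncio.QueueFull:
        capture.overflow = True


async def main() -> None:
    capture = DemoAudioQueue()
    for chunk in (b"aa", b"bb", b"cc"):  # the third chunk overflows the bounded queue
        capture_chunk(capture, chunk)
    capture_chunk(capture, None)  # None is the end-of-stream marker
    print(capture.overflow)  # True: at least one chunk was dropped


asyncio.run(main())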
@@ -93,9 +93,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
         if self.registry_entry and (device_id := self.registry_entry.device_id):
             pipeline_data.pipeline_devices.add(device_id)
             self.async_on_remove(
-                lambda: pipeline_data.pipeline_devices.discard(
-                    device_id  # type: ignore[arg-type]
-                )
+                lambda: pipeline_data.pipeline_devices.discard(device_id)
             )
 
     async def async_select_option(self, option: str) -> None:
@@ -3,22 +3,31 @@ import asyncio
 
 # Suppressing disable=deprecated-module is needed for Python 3.11
 import audioop  # pylint: disable=deprecated-module
+import base64
 from collections.abc import AsyncGenerator, Callable
+import contextlib
 import logging
-from typing import Any
+import math
+from typing import Any, Final
 
 import voluptuous as vol
 
 from homeassistant.components import conversation, stt, tts, websocket_api
-from homeassistant.const import MATCH_ALL
+from homeassistant.const import ATTR_DEVICE_ID, ATTR_SECONDS, MATCH_ALL
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import config_validation as cv
 from homeassistant.util import language as language_util
 
-from .const import DEFAULT_PIPELINE_TIMEOUT, DEFAULT_WAKE_WORD_TIMEOUT, DOMAIN
+from .const import (
+    DEFAULT_PIPELINE_TIMEOUT,
+    DEFAULT_WAKE_WORD_TIMEOUT,
+    DOMAIN,
+    EVENT_RECORDING,
+)
 from .error import PipelineNotFound
 from .pipeline import (
     AudioSettings,
+    DeviceAudioQueue,
     PipelineData,
     PipelineError,
     PipelineEvent,
@@ -32,6 +41,11 @@ from .pipeline import (
 
 _LOGGER = logging.getLogger(__name__)
 
+CAPTURE_RATE: Final = 16000
+CAPTURE_WIDTH: Final = 2
+CAPTURE_CHANNELS: Final = 1
+MAX_CAPTURE_TIMEOUT: Final = 60.0
+
 
 @callback
 def async_register_websocket_api(hass: HomeAssistant) -> None:
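Review note: quick arithmetic implied by the new capture constants — 16 kHz, 16-bit, mono PCM is 32,000 bytes per second, so a maximal 60 s capture is just under 2 MB of raw audio. The constants are copied from the hunk; the derived numbers are mine.

CAPTURE_RATE = 16000  # samples per second
CAPTURE_WIDTH = 2  # bytes per sample
CAPTURE_CHANNELS = 1
MAX_CAPTURE_TIMEOUT = 60.0  # seconds

bytes_per_second = CAPTURE_RATE * CAPTURE_WIDTH * CAPTURE_CHANNELS
max_capture_bytes = int(bytes_per_second * MAX_CAPTURE_TIMEOUT)
print(bytes_per_second, max_capture_bytes)  # 32000 1920000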
@@ -40,6 +54,7 @@ def async_register_websocket_api(hass: HomeAssistant) -> None:
     websocket_api.async_register_command(hass, websocket_list_languages)
     websocket_api.async_register_command(hass, websocket_list_runs)
     websocket_api.async_register_command(hass, websocket_get_run)
+    websocket_api.async_register_command(hass, websocket_device_capture)
 
 
 @websocket_api.websocket_command(
@ -371,3 +386,100 @@ async def websocket_list_languages(
|
|||||||
else pipeline_languages
|
else pipeline_languages
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@websocket_api.require_admin
|
||||||
|
@websocket_api.websocket_command(
|
||||||
|
{
|
||||||
|
vol.Required("type"): "assist_pipeline/device/capture",
|
||||||
|
vol.Required("device_id"): str,
|
||||||
|
vol.Required("timeout"): vol.All(
|
||||||
|
# 0 < timeout <= MAX_CAPTURE_TIMEOUT
|
||||||
|
vol.Coerce(float),
|
||||||
|
vol.Range(min=0, min_included=False, max=MAX_CAPTURE_TIMEOUT),
|
||||||
|
),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
@websocket_api.async_response
|
||||||
|
async def websocket_device_capture(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
connection: websocket_api.connection.ActiveConnection,
|
||||||
|
msg: dict[str, Any],
|
||||||
|
) -> None:
|
||||||
|
"""Capture raw audio from a satellite device and forward to client."""
|
||||||
|
pipeline_data: PipelineData = hass.data[DOMAIN]
|
||||||
|
device_id = msg["device_id"]
|
||||||
|
|
||||||
|
# Number of seconds to record audio in wall clock time
|
||||||
|
timeout_seconds = msg["timeout"]
|
||||||
|
|
||||||
|
# We don't know the chunk size, so the upper bound is calculated assuming a
|
||||||
|
# single sample (16 bits) per queue item.
|
||||||
|
max_queue_items = (
|
||||||
|
# +1 for None to signal end
|
||||||
|
int(math.ceil(timeout_seconds * CAPTURE_RATE)) + 1
|
||||||
|
)
|
||||||
|
|
||||||
|
audio_queue = DeviceAudioQueue(queue=asyncio.Queue(maxsize=max_queue_items))
|
||||||
|
|
||||||
|
# Running simultaneous captures for a single device will not work by design.
|
||||||
|
# The new capture will cause the old capture to stop.
|
||||||
|
if (
|
||||||
|
old_audio_queue := pipeline_data.device_audio_queues.pop(device_id, None)
|
||||||
|
) is not None:
|
||||||
|
with contextlib.suppress(asyncio.QueueFull):
|
||||||
|
# Signal other websocket command that we're taking over
|
||||||
|
old_audio_queue.queue.put_nowait(None)
|
||||||
|
|
||||||
|
# Only one client can be capturing audio at a time
|
||||||
|
pipeline_data.device_audio_queues[device_id] = audio_queue
|
||||||
|
|
||||||
|
def clean_up_queue() -> None:
|
||||||
|
# Clean up our audio queue
|
||||||
|
maybe_audio_queue = pipeline_data.device_audio_queues.get(device_id)
|
||||||
|
if (maybe_audio_queue is not None) and (maybe_audio_queue.id == audio_queue.id):
|
||||||
|
# Only pop if this is our queue
|
||||||
|
pipeline_data.device_audio_queues.pop(device_id)
|
||||||
|
|
||||||
|
# Unsubscribe cleans up queue
|
||||||
|
connection.subscriptions[msg["id"]] = clean_up_queue
|
||||||
|
|
||||||
|
# Audio will follow as events
|
||||||
|
connection.send_result(msg["id"])
|
||||||
|
|
||||||
|
# Record to logbook
|
||||||
|
hass.bus.async_fire(
|
||||||
|
EVENT_RECORDING,
|
||||||
|
{
|
||||||
|
ATTR_DEVICE_ID: device_id,
|
||||||
|
ATTR_SECONDS: timeout_seconds,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with contextlib.suppress(asyncio.TimeoutError):
|
||||||
|
async with asyncio.timeout(timeout_seconds):
|
||||||
|
while True:
|
||||||
|
# Send audio chunks encoded as base64
|
||||||
|
audio_bytes = await audio_queue.queue.get()
|
||||||
|
if audio_bytes is None:
|
||||||
|
# Signal to stop
|
||||||
|
break
|
||||||
|
|
||||||
|
connection.send_event(
|
||||||
|
msg["id"],
|
||||||
|
{
|
||||||
|
"type": "audio",
|
||||||
|
"rate": CAPTURE_RATE, # hertz
|
||||||
|
"width": CAPTURE_WIDTH, # bytes
|
||||||
|
"channels": CAPTURE_CHANNELS,
|
||||||
|
"audio": base64.b64encode(audio_bytes).decode("ascii"),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
# Capture has ended
|
||||||
|
connection.send_event(
|
||||||
|
msg["id"], {"type": "end", "overflow": audio_queue.overflow}
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
clean_up_queue()
|
||||||
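Review note: a sketch of the two calculations the new websocket_device_capture command relies on — the worst-case queue size (one 16-bit sample per queue item, plus one slot for the None sentinel, as the diff's comment explains) and the shape of the per-chunk event sent to the client. max_queue_items and audio_event are illustrative helper names, not functions from the integration.

import base64
import math

CAPTURE_RATE = 16000  # hertz, from the constants above


def max_queue_items(timeout_seconds: float) -> int:
    """Worst-case queue size: one 16-bit sample per item, plus the None sentinel."""
    return int(math.ceil(timeout_seconds * CAPTURE_RATE)) + 1


def audio_event(audio_bytes: bytes) -> dict[str, object]:
    """Shape of the per-chunk websocket event (field names taken from the diff)."""
    return {
        "type": "audio",
        "rate": CAPTURE_RATE,  # hertz
        "width": 2,  # bytes per sample
        "channels": 1,
        "audio": base64.b64encode(audio_bytes).decode("ascii"),
    }


print(max_queue_items(5.0))  # 80001
print(audio_event(b"\x00\x01")["audio"])  # AAE=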
|
@ -3,10 +3,14 @@ from __future__ import annotations
|
|||||||
|
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
|
from collections.abc import Awaitable, Callable, Coroutine
|
||||||
|
import functools
|
||||||
import logging
|
import logging
|
||||||
from typing import Any, cast
|
from typing import Any, TypeVar, cast
|
||||||
|
|
||||||
from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy
|
from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy
|
||||||
|
from aiohttp import ClientSession
|
||||||
|
from pyasuswrt import AsusWrtError, AsusWrtHttp
|
||||||
|
|
||||||
from homeassistant.const import (
|
from homeassistant.const import (
|
||||||
CONF_HOST,
|
CONF_HOST,
|
||||||
@ -17,6 +21,7 @@ from homeassistant.const import (
|
|||||||
CONF_USERNAME,
|
CONF_USERNAME,
|
||||||
)
|
)
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
|
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||||
from homeassistant.helpers.device_registry import format_mac
|
from homeassistant.helpers.device_registry import format_mac
|
||||||
from homeassistant.helpers.update_coordinator import UpdateFailed
|
from homeassistant.helpers.update_coordinator import UpdateFailed
|
||||||
|
|
||||||
@ -29,6 +34,8 @@ from .const import (
|
|||||||
DEFAULT_INTERFACE,
|
DEFAULT_INTERFACE,
|
||||||
KEY_METHOD,
|
KEY_METHOD,
|
||||||
KEY_SENSORS,
|
KEY_SENSORS,
|
||||||
|
PROTOCOL_HTTP,
|
||||||
|
PROTOCOL_HTTPS,
|
||||||
PROTOCOL_TELNET,
|
PROTOCOL_TELNET,
|
||||||
SENSORS_BYTES,
|
SENSORS_BYTES,
|
||||||
SENSORS_LOAD_AVG,
|
SENSORS_LOAD_AVG,
|
||||||
@ -47,9 +54,40 @@ WrtDevice = namedtuple("WrtDevice", ["ip", "name", "connected_to"])
|
|||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def _get_dict(keys: list, values: list) -> dict[str, Any]:
|
_AsusWrtBridgeT = TypeVar("_AsusWrtBridgeT", bound="AsusWrtBridge")
|
||||||
"""Create a dict from a list of keys and values."""
|
_FuncType = Callable[[_AsusWrtBridgeT], Awaitable[list[Any] | dict[str, Any]]]
|
||||||
return dict(zip(keys, values))
|
_ReturnFuncType = Callable[[_AsusWrtBridgeT], Coroutine[Any, Any, dict[str, Any]]]
|
||||||
|
|
||||||
|
|
||||||
|
def handle_errors_and_zip(
|
||||||
|
exceptions: type[Exception] | tuple[type[Exception], ...], keys: list[str] | None
|
||||||
|
) -> Callable[[_FuncType], _ReturnFuncType]:
|
||||||
|
"""Run library methods and zip results or manage exceptions."""
|
||||||
|
|
||||||
|
def _handle_errors_and_zip(func: _FuncType) -> _ReturnFuncType:
|
||||||
|
"""Run library methods and zip results or manage exceptions."""
|
||||||
|
|
||||||
|
@functools.wraps(func)
|
||||||
|
async def _wrapper(self: _AsusWrtBridgeT) -> dict[str, Any]:
|
||||||
|
try:
|
||||||
|
data = await func(self)
|
||||||
|
except exceptions as exc:
|
||||||
|
raise UpdateFailed(exc) from exc
|
||||||
|
|
||||||
|
if keys is None:
|
||||||
|
if not isinstance(data, dict):
|
||||||
|
raise UpdateFailed("Received invalid data type")
|
||||||
|
return data
|
||||||
|
|
||||||
|
if isinstance(data, dict):
|
||||||
|
return dict(zip(keys, list(data.values())))
|
||||||
|
if not isinstance(data, list):
|
||||||
|
raise UpdateFailed("Received invalid data type")
|
||||||
|
return dict(zip(keys, data))
|
||||||
|
|
||||||
|
return _wrapper
|
||||||
|
|
||||||
|
return _handle_errors_and_zip
|
||||||
|
|
||||||
|
|
||||||
class AsusWrtBridge(ABC):
|
class AsusWrtBridge(ABC):
|
||||||
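Review note: the new handle_errors_and_zip decorator is the core of this refactor, so here is a simplified, runnable re-implementation of the same idea — await the wrapped getter, convert library exceptions into UpdateFailed, and zip list results onto known sensor keys. UpdateFailed and DemoBridge are stand-ins; the real decorator also type-checks dict results and lives in bridge.py.

import asyncio
import functools
from collections.abc import Awaitable, Callable
from typing import Any


class UpdateFailed(Exception):
    """Stand-in for homeassistant.helpers.update_coordinator.UpdateFailed."""


def handle_errors_and_zip(
    exceptions: tuple[type[Exception], ...], keys: list[str] | None
) -> Callable[[Callable[..., Awaitable[Any]]], Callable[..., Awaitable[dict[str, Any]]]]:
    """Turn library errors into UpdateFailed and zip list results onto known keys."""

    def decorator(
        func: Callable[..., Awaitable[Any]]
    ) -> Callable[..., Awaitable[dict[str, Any]]]:
        @functools.wraps(func)
        async def wrapper(*args: Any) -> dict[str, Any]:
            try:
                data = await func(*args)
            except exceptions as exc:
                raise UpdateFailed(exc) from exc
            if keys is None:
                return dict(data)
            return dict(zip(keys, data))

        return wrapper

    return decorator


class DemoBridge:
    @handle_errors_and_zip((OSError,), ["sensor_rx_bytes", "sensor_tx_bytes"])
    async def get_bytes(self) -> list[int]:
        return [1234, 5678]


print(asyncio.run(DemoBridge().get_bytes()))
# {'sensor_rx_bytes': 1234, 'sensor_tx_bytes': 5678}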
@ -60,6 +98,9 @@ class AsusWrtBridge(ABC):
|
|||||||
hass: HomeAssistant, conf: dict[str, Any], options: dict[str, Any] | None = None
|
hass: HomeAssistant, conf: dict[str, Any], options: dict[str, Any] | None = None
|
||||||
) -> AsusWrtBridge:
|
) -> AsusWrtBridge:
|
||||||
"""Get Bridge instance."""
|
"""Get Bridge instance."""
|
||||||
|
if conf[CONF_PROTOCOL] in (PROTOCOL_HTTPS, PROTOCOL_HTTP):
|
||||||
|
session = async_get_clientsession(hass)
|
||||||
|
return AsusWrtHttpBridge(conf, session)
|
||||||
return AsusWrtLegacyBridge(conf, options)
|
return AsusWrtLegacyBridge(conf, options)
|
||||||
|
|
||||||
def __init__(self, host: str) -> None:
|
def __init__(self, host: str) -> None:
|
||||||
@ -236,38 +277,135 @@ class AsusWrtLegacyBridge(AsusWrtBridge):
|
|||||||
availability = await self._api.async_find_temperature_commands()
|
availability = await self._api.async_find_temperature_commands()
|
||||||
return [SENSORS_TEMPERATURES[i] for i in range(3) if availability[i]]
|
return [SENSORS_TEMPERATURES[i] for i in range(3) if availability[i]]
|
||||||
|
|
||||||
async def _get_bytes(self) -> dict[str, Any]:
|
@handle_errors_and_zip((IndexError, OSError, ValueError), SENSORS_BYTES)
|
||||||
|
async def _get_bytes(self) -> Any:
|
||||||
"""Fetch byte information from the router."""
|
"""Fetch byte information from the router."""
|
||||||
try:
|
return await self._api.async_get_bytes_total()
|
||||||
datas = await self._api.async_get_bytes_total()
|
|
||||||
except (IndexError, OSError, ValueError) as exc:
|
|
||||||
raise UpdateFailed(exc) from exc
|
|
||||||
|
|
||||||
return _get_dict(SENSORS_BYTES, datas)
|
@handle_errors_and_zip((IndexError, OSError, ValueError), SENSORS_RATES)
|
||||||
|
async def _get_rates(self) -> Any:
|
||||||
async def _get_rates(self) -> dict[str, Any]:
|
|
||||||
"""Fetch rates information from the router."""
|
"""Fetch rates information from the router."""
|
||||||
try:
|
return await self._api.async_get_current_transfer_rates()
|
||||||
rates = await self._api.async_get_current_transfer_rates()
|
|
||||||
except (IndexError, OSError, ValueError) as exc:
|
|
||||||
raise UpdateFailed(exc) from exc
|
|
||||||
|
|
||||||
return _get_dict(SENSORS_RATES, rates)
|
@handle_errors_and_zip((IndexError, OSError, ValueError), SENSORS_LOAD_AVG)
|
||||||
|
async def _get_load_avg(self) -> Any:
|
||||||
async def _get_load_avg(self) -> dict[str, Any]:
|
|
||||||
"""Fetch load average information from the router."""
|
"""Fetch load average information from the router."""
|
||||||
try:
|
return await self._api.async_get_loadavg()
|
||||||
avg = await self._api.async_get_loadavg()
|
|
||||||
except (IndexError, OSError, ValueError) as exc:
|
|
||||||
raise UpdateFailed(exc) from exc
|
|
||||||
|
|
||||||
return _get_dict(SENSORS_LOAD_AVG, avg)
|
@handle_errors_and_zip((OSError, ValueError), None)
|
||||||
|
async def _get_temperatures(self) -> Any:
|
||||||
async def _get_temperatures(self) -> dict[str, Any]:
|
|
||||||
"""Fetch temperatures information from the router."""
|
"""Fetch temperatures information from the router."""
|
||||||
try:
|
return await self._api.async_get_temperature()
|
||||||
temperatures: dict[str, Any] = await self._api.async_get_temperature()
|
|
||||||
except (OSError, ValueError) as exc:
|
|
||||||
raise UpdateFailed(exc) from exc
|
|
||||||
|
|
||||||
return temperatures
|
|
||||||
|
class AsusWrtHttpBridge(AsusWrtBridge):
|
||||||
|
"""The Bridge that use HTTP library."""
|
||||||
|
|
||||||
|
def __init__(self, conf: dict[str, Any], session: ClientSession) -> None:
|
||||||
|
"""Initialize Bridge that use HTTP library."""
|
||||||
|
super().__init__(conf[CONF_HOST])
|
||||||
|
self._api: AsusWrtHttp = self._get_api(conf, session)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _get_api(conf: dict[str, Any], session: ClientSession) -> AsusWrtHttp:
|
||||||
|
"""Get the AsusWrtHttp API."""
|
||||||
|
return AsusWrtHttp(
|
||||||
|
conf[CONF_HOST],
|
||||||
|
conf[CONF_USERNAME],
|
||||||
|
conf.get(CONF_PASSWORD, ""),
|
||||||
|
use_https=conf[CONF_PROTOCOL] == PROTOCOL_HTTPS,
|
||||||
|
port=conf.get(CONF_PORT),
|
||||||
|
session=session,
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_connected(self) -> bool:
|
||||||
|
"""Get connected status."""
|
||||||
|
return cast(bool, self._api.is_connected)
|
||||||
|
|
||||||
|
async def async_connect(self) -> None:
|
||||||
|
"""Connect to the device."""
|
||||||
|
await self._api.async_connect()
|
||||||
|
|
||||||
|
# get main router properties
|
||||||
|
if mac := self._api.mac:
|
||||||
|
self._label_mac = format_mac(mac)
|
||||||
|
self._firmware = self._api.firmware
|
||||||
|
self._model = self._api.model
|
||||||
|
|
||||||
|
async def async_disconnect(self) -> None:
|
||||||
|
"""Disconnect to the device."""
|
||||||
|
await self._api.async_disconnect()
|
||||||
|
|
||||||
|
async def async_get_connected_devices(self) -> dict[str, WrtDevice]:
|
||||||
|
"""Get list of connected devices."""
|
||||||
|
try:
|
||||||
|
api_devices = await self._api.async_get_connected_devices()
|
||||||
|
except AsusWrtError as exc:
|
||||||
|
raise UpdateFailed(exc) from exc
|
||||||
|
return {
|
||||||
|
format_mac(mac): WrtDevice(dev.ip, dev.name, dev.node)
|
||||||
|
for mac, dev in api_devices.items()
|
||||||
|
}
|
||||||
|
|
||||||
|
async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]:
|
||||||
|
"""Return a dictionary of available sensors for this bridge."""
|
||||||
|
sensors_temperatures = await self._get_available_temperature_sensors()
|
||||||
|
sensors_types = {
|
||||||
|
SENSORS_TYPE_BYTES: {
|
||||||
|
KEY_SENSORS: SENSORS_BYTES,
|
||||||
|
KEY_METHOD: self._get_bytes,
|
||||||
|
},
|
||||||
|
SENSORS_TYPE_LOAD_AVG: {
|
||||||
|
KEY_SENSORS: SENSORS_LOAD_AVG,
|
||||||
|
KEY_METHOD: self._get_load_avg,
|
||||||
|
},
|
||||||
|
SENSORS_TYPE_RATES: {
|
||||||
|
KEY_SENSORS: SENSORS_RATES,
|
||||||
|
KEY_METHOD: self._get_rates,
|
||||||
|
},
|
||||||
|
SENSORS_TYPE_TEMPERATURES: {
|
||||||
|
KEY_SENSORS: sensors_temperatures,
|
||||||
|
KEY_METHOD: self._get_temperatures,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
return sensors_types
|
||||||
|
|
||||||
|
async def _get_available_temperature_sensors(self) -> list[str]:
|
||||||
|
"""Check which temperature information is available on the router."""
|
||||||
|
try:
|
||||||
|
available_temps = await self._api.async_get_temperatures()
|
||||||
|
available_sensors = [
|
||||||
|
t for t in SENSORS_TEMPERATURES if t in available_temps
|
||||||
|
]
|
||||||
|
except AsusWrtError as exc:
|
||||||
|
_LOGGER.warning(
|
||||||
|
(
|
||||||
|
"Failed checking temperature sensor availability for ASUS router"
|
||||||
|
" %s. Exception: %s"
|
||||||
|
),
|
||||||
|
self.host,
|
||||||
|
exc,
|
||||||
|
)
|
||||||
|
return []
|
||||||
|
return available_sensors
|
||||||
|
|
||||||
|
@handle_errors_and_zip(AsusWrtError, SENSORS_BYTES)
|
||||||
|
async def _get_bytes(self) -> Any:
|
||||||
|
"""Fetch byte information from the router."""
|
||||||
|
return await self._api.async_get_traffic_bytes()
|
||||||
|
|
||||||
|
@handle_errors_and_zip(AsusWrtError, SENSORS_RATES)
|
||||||
|
async def _get_rates(self) -> Any:
|
||||||
|
"""Fetch rates information from the router."""
|
||||||
|
return await self._api.async_get_traffic_rates()
|
||||||
|
|
||||||
|
@handle_errors_and_zip(AsusWrtError, SENSORS_LOAD_AVG)
|
||||||
|
async def _get_load_avg(self) -> Any:
|
||||||
|
"""Fetch cpu load avg information from the router."""
|
||||||
|
return await self._api.async_get_loadavg()
|
||||||
|
|
||||||
|
@handle_errors_and_zip(AsusWrtError, None)
|
||||||
|
async def _get_temperatures(self) -> Any:
|
||||||
|
"""Fetch temperatures information from the router."""
|
||||||
|
return await self._api.async_get_temperatures()
|
||||||
|
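Review note: both bridges expose their sensors through a "type -> {KEY_SENSORS, KEY_METHOD}" mapping returned by async_get_available_sensors(). The sketch below shows that registry shape and how a caller might poll it; the sensor names and poll_all are placeholders, not the integration's code.

import asyncio
from typing import Any

KEY_SENSORS = "sensors"
KEY_METHOD = "method"


async def _get_rates() -> dict[str, float]:
    return {"sensor_rx_rates": 120.0, "sensor_tx_rates": 42.0}


SENSOR_TYPES: dict[str, dict[str, Any]] = {
    "sensors_rates": {
        KEY_SENSORS: ["sensor_rx_rates", "sensor_tx_rates"],
        KEY_METHOD: _get_rates,
    },
}


async def poll_all() -> dict[str, dict[str, float]]:
    """Call each registered getter once, roughly what the update coordinator does."""
    return {name: await desc[KEY_METHOD]() for name, desc in SENSOR_TYPES.items()}


print(asyncio.run(poll_all()))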
@ -7,6 +7,7 @@ import os
|
|||||||
import socket
|
import socket
|
||||||
from typing import Any, cast
|
from typing import Any, cast
|
||||||
|
|
||||||
|
from pyasuswrt import AsusWrtError
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
from homeassistant.components.device_tracker import (
|
from homeassistant.components.device_tracker import (
|
||||||
@ -15,6 +16,7 @@ from homeassistant.components.device_tracker import (
|
|||||||
)
|
)
|
||||||
from homeassistant.config_entries import ConfigEntry, ConfigFlow
|
from homeassistant.config_entries import ConfigEntry, ConfigFlow
|
||||||
from homeassistant.const import (
|
from homeassistant.const import (
|
||||||
|
CONF_BASE,
|
||||||
CONF_HOST,
|
CONF_HOST,
|
||||||
CONF_MODE,
|
CONF_MODE,
|
||||||
CONF_PASSWORD,
|
CONF_PASSWORD,
|
||||||
@ -30,6 +32,7 @@ from homeassistant.helpers.schema_config_entry_flow import (
|
|||||||
SchemaFlowFormStep,
|
SchemaFlowFormStep,
|
||||||
SchemaOptionsFlowHandler,
|
SchemaOptionsFlowHandler,
|
||||||
)
|
)
|
||||||
|
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
|
||||||
|
|
||||||
from .bridge import AsusWrtBridge
|
from .bridge import AsusWrtBridge
|
||||||
from .const import (
|
from .const import (
|
||||||
@ -44,11 +47,21 @@ from .const import (
|
|||||||
DOMAIN,
|
DOMAIN,
|
||||||
MODE_AP,
|
MODE_AP,
|
||||||
MODE_ROUTER,
|
MODE_ROUTER,
|
||||||
|
PROTOCOL_HTTP,
|
||||||
|
PROTOCOL_HTTPS,
|
||||||
PROTOCOL_SSH,
|
PROTOCOL_SSH,
|
||||||
PROTOCOL_TELNET,
|
PROTOCOL_TELNET,
|
||||||
)
|
)
|
||||||
|
|
||||||
LABEL_MAC = "LABEL_MAC"
|
ALLOWED_PROTOCOL = [
|
||||||
|
PROTOCOL_HTTPS,
|
||||||
|
PROTOCOL_SSH,
|
||||||
|
PROTOCOL_HTTP,
|
||||||
|
PROTOCOL_TELNET,
|
||||||
|
]
|
||||||
|
|
||||||
|
PASS_KEY = "pass_key"
|
||||||
|
PASS_KEY_MSG = "Only provide password or SSH key file"
|
||||||
|
|
||||||
RESULT_CONN_ERROR = "cannot_connect"
|
RESULT_CONN_ERROR = "cannot_connect"
|
||||||
RESULT_SUCCESS = "success"
|
RESULT_SUCCESS = "success"
|
||||||
@ -56,14 +69,20 @@ RESULT_UNKNOWN = "unknown"
|
|||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
LEGACY_SCHEMA = vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Required(CONF_MODE, default=MODE_ROUTER): vol.In(
|
||||||
|
{MODE_ROUTER: "Router", MODE_AP: "Access Point"}
|
||||||
|
),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
OPTIONS_SCHEMA = vol.Schema(
|
OPTIONS_SCHEMA = vol.Schema(
|
||||||
{
|
{
|
||||||
vol.Optional(
|
vol.Optional(
|
||||||
CONF_CONSIDER_HOME, default=DEFAULT_CONSIDER_HOME.total_seconds()
|
CONF_CONSIDER_HOME, default=DEFAULT_CONSIDER_HOME.total_seconds()
|
||||||
): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=900)),
|
): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=900)),
|
||||||
vol.Optional(CONF_TRACK_UNKNOWN, default=DEFAULT_TRACK_UNKNOWN): bool,
|
vol.Optional(CONF_TRACK_UNKNOWN, default=DEFAULT_TRACK_UNKNOWN): bool,
|
||||||
vol.Required(CONF_INTERFACE, default=DEFAULT_INTERFACE): str,
|
|
||||||
vol.Required(CONF_DNSMASQ, default=DEFAULT_DNSMASQ): str,
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -72,12 +91,22 @@ async def get_options_schema(handler: SchemaCommonFlowHandler) -> vol.Schema:
     """Get options schema."""
     options_flow: SchemaOptionsFlowHandler
     options_flow = cast(SchemaOptionsFlowHandler, handler.parent_handler)
-    if options_flow.config_entry.data[CONF_MODE] == MODE_AP:
-        return OPTIONS_SCHEMA.extend(
+    used_protocol = options_flow.config_entry.data[CONF_PROTOCOL]
+    if used_protocol in [PROTOCOL_SSH, PROTOCOL_TELNET]:
+        data_schema = OPTIONS_SCHEMA.extend(
             {
-                vol.Optional(CONF_REQUIRE_IP, default=True): bool,
+                vol.Required(CONF_INTERFACE, default=DEFAULT_INTERFACE): str,
+                vol.Required(CONF_DNSMASQ, default=DEFAULT_DNSMASQ): str,
             }
         )
+        if options_flow.config_entry.data[CONF_MODE] == MODE_AP:
+            return data_schema.extend(
+                {
+                    vol.Optional(CONF_REQUIRE_IP, default=True): bool,
+                }
+            )
+        return data_schema
 
     return OPTIONS_SCHEMA
 
|
|||||||
|
|
||||||
|
|
||||||
class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
|
class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||||
"""Handle a config flow."""
|
"""Handle a config flow for AsusWRT."""
|
||||||
|
|
||||||
VERSION = 1
|
VERSION = 1
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
"""Initialize the AsusWrt config flow."""
|
||||||
|
self._config_data: dict[str, Any] = {}
|
||||||
|
|
||||||
@callback
|
@callback
|
||||||
def _show_setup_form(
|
def _show_setup_form(self, error: str | None = None) -> FlowResult:
|
||||||
self,
|
|
||||||
user_input: dict[str, Any] | None = None,
|
|
||||||
errors: dict[str, str] | None = None,
|
|
||||||
) -> FlowResult:
|
|
||||||
"""Show the setup form to the user."""
|
"""Show the setup form to the user."""
|
||||||
|
|
||||||
if user_input is None:
|
user_input = self._config_data
|
||||||
user_input = {}
|
|
||||||
|
|
||||||
adv_schema = {}
|
|
||||||
conf_password = vol.Required(CONF_PASSWORD)
|
|
||||||
if self.show_advanced_options:
|
if self.show_advanced_options:
|
||||||
conf_password = vol.Optional(CONF_PASSWORD)
|
add_schema = {
|
||||||
adv_schema[vol.Optional(CONF_PORT)] = cv.port
|
vol.Exclusive(CONF_PASSWORD, PASS_KEY, PASS_KEY_MSG): str,
|
||||||
adv_schema[vol.Optional(CONF_SSH_KEY)] = str
|
vol.Optional(CONF_PORT): cv.port,
|
||||||
|
vol.Exclusive(CONF_SSH_KEY, PASS_KEY, PASS_KEY_MSG): str,
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
add_schema = {vol.Required(CONF_PASSWORD): str}
|
||||||
|
|
||||||
schema = {
|
schema = {
|
||||||
vol.Required(CONF_HOST, default=user_input.get(CONF_HOST, "")): str,
|
vol.Required(CONF_HOST, default=user_input.get(CONF_HOST, "")): str,
|
||||||
vol.Required(CONF_USERNAME, default=user_input.get(CONF_USERNAME, "")): str,
|
vol.Required(CONF_USERNAME, default=user_input.get(CONF_USERNAME, "")): str,
|
||||||
conf_password: str,
|
**add_schema,
|
||||||
vol.Required(CONF_PROTOCOL, default=PROTOCOL_SSH): vol.In(
|
vol.Required(
|
||||||
{PROTOCOL_SSH: "SSH", PROTOCOL_TELNET: "Telnet"}
|
CONF_PROTOCOL,
|
||||||
),
|
default=user_input.get(CONF_PROTOCOL, PROTOCOL_HTTPS),
|
||||||
**adv_schema,
|
): SelectSelector(
|
||||||
vol.Required(CONF_MODE, default=MODE_ROUTER): vol.In(
|
SelectSelectorConfig(
|
||||||
{MODE_ROUTER: "Router", MODE_AP: "Access Point"}
|
options=ALLOWED_PROTOCOL, translation_key="protocols"
|
||||||
|
)
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
|
|
||||||
return self.async_show_form(
|
return self.async_show_form(
|
||||||
step_id="user",
|
step_id="user",
|
||||||
data_schema=vol.Schema(schema),
|
data_schema=vol.Schema(schema),
|
||||||
errors=errors or {},
|
errors={CONF_BASE: error} if error else None,
|
||||||
)
|
)
|
||||||
|
|
||||||
async def _async_check_connection(
|
async def _async_check_connection(
|
||||||
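Review note: the advanced setup form now uses vol.Exclusive so that the password and SSH key fields share one exclusion group; supplying both fails validation with PASS_KEY_MSG. A minimal standalone demonstration, using plain string keys instead of the CONF_* constants:

import voluptuous as vol

PASS_KEY = "pass_key"
PASS_KEY_MSG = "Only provide password or SSH key file"

schema = vol.Schema(
    {
        vol.Exclusive("password", PASS_KEY, PASS_KEY_MSG): str,
        vol.Exclusive("ssh_key", PASS_KEY, PASS_KEY_MSG): str,
    }
)

print(schema({"password": "secret"}))  # passes
try:
    schema({"password": "secret", "ssh_key": "/config/id_rsa"})
except vol.Invalid as err:
    print(err)  # both keys in the same exclusion group -> rejected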
@ -147,25 +178,49 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||||||
) -> tuple[str, str | None]:
|
) -> tuple[str, str | None]:
|
||||||
"""Attempt to connect the AsusWrt router."""
|
"""Attempt to connect the AsusWrt router."""
|
||||||
|
|
||||||
|
api: AsusWrtBridge
|
||||||
host: str = user_input[CONF_HOST]
|
host: str = user_input[CONF_HOST]
|
||||||
api = AsusWrtBridge.get_bridge(self.hass, user_input)
|
protocol = user_input[CONF_PROTOCOL]
|
||||||
|
error: str | None = None
|
||||||
|
|
||||||
|
conf = {**user_input, CONF_MODE: MODE_ROUTER}
|
||||||
|
api = AsusWrtBridge.get_bridge(self.hass, conf)
|
||||||
try:
|
try:
|
||||||
await api.async_connect()
|
await api.async_connect()
|
||||||
|
|
||||||
except OSError:
|
except (AsusWrtError, OSError):
|
||||||
_LOGGER.error("Error connecting to the AsusWrt router at %s", host)
|
_LOGGER.error(
|
||||||
return RESULT_CONN_ERROR, None
|
"Error connecting to the AsusWrt router at %s using protocol %s",
|
||||||
|
host,
|
||||||
|
protocol,
|
||||||
|
)
|
||||||
|
error = RESULT_CONN_ERROR
|
||||||
|
|
||||||
except Exception: # pylint: disable=broad-except
|
except Exception: # pylint: disable=broad-except
|
||||||
_LOGGER.exception(
|
_LOGGER.exception(
|
||||||
"Unknown error connecting with AsusWrt router at %s", host
|
"Unknown error connecting with AsusWrt router at %s using protocol %s",
|
||||||
|
host,
|
||||||
|
protocol,
|
||||||
)
|
)
|
||||||
return RESULT_UNKNOWN, None
|
error = RESULT_UNKNOWN
|
||||||
|
|
||||||
if not api.is_connected:
|
if error is None:
|
||||||
_LOGGER.error("Error connecting to the AsusWrt router at %s", host)
|
if not api.is_connected:
|
||||||
return RESULT_CONN_ERROR, None
|
_LOGGER.error(
|
||||||
|
"Error connecting to the AsusWrt router at %s using protocol %s",
|
||||||
|
host,
|
||||||
|
protocol,
|
||||||
|
)
|
||||||
|
error = RESULT_CONN_ERROR
|
||||||
|
|
||||||
|
if error is not None:
|
||||||
|
return error, None
|
||||||
|
|
||||||
|
_LOGGER.info(
|
||||||
|
"Successfully connected to the AsusWrt router at %s using protocol %s",
|
||||||
|
host,
|
||||||
|
protocol,
|
||||||
|
)
|
||||||
unique_id = api.label_mac
|
unique_id = api.label_mac
|
||||||
await api.async_disconnect()
|
await api.async_disconnect()
|
||||||
|
|
||||||
@ -182,51 +237,59 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||||||
return self.async_abort(reason="no_unique_id")
|
return self.async_abort(reason="no_unique_id")
|
||||||
|
|
||||||
if user_input is None:
|
if user_input is None:
|
||||||
return self._show_setup_form(user_input)
|
return self._show_setup_form()
|
||||||
|
|
||||||
errors: dict[str, str] = {}
|
|
||||||
host: str = user_input[CONF_HOST]
|
|
||||||
|
|
||||||
|
self._config_data = user_input
|
||||||
pwd: str | None = user_input.get(CONF_PASSWORD)
|
pwd: str | None = user_input.get(CONF_PASSWORD)
|
||||||
ssh: str | None = user_input.get(CONF_SSH_KEY)
|
ssh: str | None = user_input.get(CONF_SSH_KEY)
|
||||||
|
protocol: str = user_input[CONF_PROTOCOL]
|
||||||
|
|
||||||
|
if not pwd and protocol != PROTOCOL_SSH:
|
||||||
|
return self._show_setup_form(error="pwd_required")
|
||||||
if not (pwd or ssh):
|
if not (pwd or ssh):
|
||||||
errors["base"] = "pwd_or_ssh"
|
return self._show_setup_form(error="pwd_or_ssh")
|
||||||
elif ssh:
|
if ssh and not await self.hass.async_add_executor_job(_is_file, ssh):
|
||||||
if pwd:
|
return self._show_setup_form(error="ssh_not_file")
|
||||||
errors["base"] = "pwd_and_ssh"
|
|
||||||
|
host: str = user_input[CONF_HOST]
|
||||||
|
if not await self.hass.async_add_executor_job(_get_ip, host):
|
||||||
|
return self._show_setup_form(error="invalid_host")
|
||||||
|
|
||||||
|
result, unique_id = await self._async_check_connection(user_input)
|
||||||
|
if result == RESULT_SUCCESS:
|
||||||
|
if unique_id:
|
||||||
|
await self.async_set_unique_id(unique_id)
|
||||||
|
# we allow to configure a single instance without unique id
|
||||||
|
elif self._async_current_entries():
|
||||||
|
return self.async_abort(reason="invalid_unique_id")
|
||||||
else:
|
else:
|
||||||
isfile = await self.hass.async_add_executor_job(_is_file, ssh)
|
_LOGGER.warning(
|
||||||
if not isfile:
|
"This device does not provide a valid Unique ID."
|
||||||
errors["base"] = "ssh_not_file"
|
" Configuration of multiple instance will not be possible"
|
||||||
|
|
||||||
if not errors:
|
|
||||||
ip_address = await self.hass.async_add_executor_job(_get_ip, host)
|
|
||||||
if not ip_address:
|
|
||||||
errors["base"] = "invalid_host"
|
|
||||||
|
|
||||||
if not errors:
|
|
||||||
result, unique_id = await self._async_check_connection(user_input)
|
|
||||||
if result == RESULT_SUCCESS:
|
|
||||||
if unique_id:
|
|
||||||
await self.async_set_unique_id(unique_id)
|
|
||||||
# we allow configure a single instance without unique id
|
|
||||||
elif self._async_current_entries():
|
|
||||||
return self.async_abort(reason="invalid_unique_id")
|
|
||||||
else:
|
|
||||||
_LOGGER.warning(
|
|
||||||
"This device does not provide a valid Unique ID."
|
|
||||||
" Configuration of multiple instance will not be possible"
|
|
||||||
)
|
|
||||||
|
|
||||||
return self.async_create_entry(
|
|
||||||
title=host,
|
|
||||||
data=user_input,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
errors["base"] = result
|
if protocol in [PROTOCOL_SSH, PROTOCOL_TELNET]:
|
||||||
|
return await self.async_step_legacy()
|
||||||
|
return await self._async_save_entry()
|
||||||
|
|
||||||
return self._show_setup_form(user_input, errors)
|
return self._show_setup_form(error=result)
|
||||||
|
|
||||||
|
async def async_step_legacy(
|
||||||
|
self, user_input: dict[str, Any] | None = None
|
||||||
|
) -> FlowResult:
|
||||||
|
"""Handle a flow for legacy settings."""
|
||||||
|
if user_input is None:
|
||||||
|
return self.async_show_form(step_id="legacy", data_schema=LEGACY_SCHEMA)
|
||||||
|
|
||||||
|
self._config_data.update(user_input)
|
||||||
|
return await self._async_save_entry()
|
||||||
|
|
||||||
|
async def _async_save_entry(self) -> FlowResult:
|
||||||
|
"""Save entry data if unique id is valid."""
|
||||||
|
return self.async_create_entry(
|
||||||
|
title=self._config_data[CONF_HOST],
|
||||||
|
data=self._config_data,
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@callback
|
@callback
|
||||||
|
@ -20,6 +20,8 @@ KEY_SENSORS = "sensors"
|
|||||||
MODE_AP = "ap"
|
MODE_AP = "ap"
|
||||||
MODE_ROUTER = "router"
|
MODE_ROUTER = "router"
|
||||||
|
|
||||||
|
PROTOCOL_HTTP = "http"
|
||||||
|
PROTOCOL_HTTPS = "https"
|
||||||
PROTOCOL_SSH = "ssh"
|
PROTOCOL_SSH = "ssh"
|
||||||
PROTOCOL_TELNET = "telnet"
|
PROTOCOL_TELNET = "telnet"
|
||||||
|
|
||||||
|
@@ -36,7 +36,7 @@ async def async_get_config_entry_diagnostics(
     device_registry = dr.async_get(hass)
     entity_registry = er.async_get(hass)
     hass_device = device_registry.async_get_device(
-        identifiers=router.device_info["identifiers"]
+        identifiers=router.device_info[ATTR_IDENTIFIERS]
     )
     if not hass_device:
         return data
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["aioasuswrt", "asyncssh"],
-  "requirements": ["aioasuswrt==1.4.0"]
+  "requirements": ["aioasuswrt==1.4.0", "pyasuswrt==0.1.20"]
 }
@@ -6,6 +6,8 @@ from datetime import datetime, timedelta
 import logging
 from typing import Any
 
+from pyasuswrt import AsusWrtError
+
 from homeassistant.components.device_tracker import (
     CONF_CONSIDER_HOME,
     DEFAULT_CONSIDER_HOME,
@@ -219,7 +221,7 @@ class AsusWrtRouter:
         """Set up a AsusWrt router."""
         try:
             await self._api.async_connect()
-        except OSError as exc:
+        except (AsusWrtError, OSError) as exc:
            raise ConfigEntryNotReady from exc
         if not self._api.is_connected:
             raise ConfigEntryNotReady
@ -2,25 +2,31 @@
|
|||||||
"config": {
|
"config": {
|
||||||
"step": {
|
"step": {
|
||||||
"user": {
|
"user": {
|
||||||
"title": "AsusWRT",
|
|
||||||
"description": "Set required parameter to connect to your router",
|
"description": "Set required parameter to connect to your router",
|
||||||
"data": {
|
"data": {
|
||||||
"host": "[%key:common::config_flow::data::host%]",
|
"host": "[%key:common::config_flow::data::host%]",
|
||||||
"name": "[%key:common::config_flow::data::name%]",
|
|
||||||
"username": "[%key:common::config_flow::data::username%]",
|
"username": "[%key:common::config_flow::data::username%]",
|
||||||
"password": "[%key:common::config_flow::data::password%]",
|
"password": "[%key:common::config_flow::data::password%]",
|
||||||
"ssh_key": "Path to your SSH key file (instead of password)",
|
"ssh_key": "Path to your SSH key file (instead of password)",
|
||||||
"protocol": "Communication protocol to use",
|
"protocol": "Communication protocol to use",
|
||||||
"port": "Port (leave empty for protocol default)",
|
"port": "Port (leave empty for protocol default)"
|
||||||
"mode": "[%key:common::config_flow::data::mode%]"
|
},
|
||||||
|
"data_description": {
|
||||||
|
"host": "The hostname or IP address of your ASUSWRT router."
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"legacy": {
|
||||||
|
"description": "Set required parameters to connect to your router",
|
||||||
|
"data": {
|
||||||
|
"mode": "Router operating mode"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"error": {
|
"error": {
|
||||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||||
"invalid_host": "[%key:common::config_flow::error::invalid_host%]",
|
"invalid_host": "[%key:common::config_flow::error::invalid_host%]",
|
||||||
"pwd_and_ssh": "Only provide password or SSH key file",
|
|
||||||
"pwd_or_ssh": "Please provide password or SSH key file",
|
"pwd_or_ssh": "Please provide password or SSH key file",
|
||||||
|
"pwd_required": "Password is required for selected protocol",
|
||||||
"ssh_not_file": "SSH key file not found",
|
"ssh_not_file": "SSH key file not found",
|
||||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||||
},
|
},
|
||||||
@ -32,7 +38,6 @@
|
|||||||
"options": {
|
"options": {
|
||||||
"step": {
|
"step": {
|
||||||
"init": {
|
"init": {
|
||||||
"title": "AsusWRT Options",
|
|
||||||
"data": {
|
"data": {
|
||||||
"consider_home": "Seconds to wait before considering a device away",
|
"consider_home": "Seconds to wait before considering a device away",
|
||||||
"track_unknown": "Track unknown / unnamed devices",
|
"track_unknown": "Track unknown / unnamed devices",
|
||||||
@ -79,5 +84,15 @@
|
|||||||
"name": "CPU Temperature"
|
"name": "CPU Temperature"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
"selector": {
|
||||||
|
"protocols": {
|
||||||
|
"options": {
|
||||||
|
"https": "HTTPS",
|
||||||
|
"http": "HTTP",
|
||||||
|
"ssh": "SSH",
|
||||||
|
"telnet": "Telnet"
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -2,10 +2,13 @@
|
|||||||
"config": {
|
"config": {
|
||||||
"step": {
|
"step": {
|
||||||
"user": {
|
"user": {
|
||||||
"title": "Connect to the device",
|
"description": "Connect to the device",
|
||||||
"data": {
|
"data": {
|
||||||
"host": "[%key:common::config_flow::data::host%]",
|
"host": "[%key:common::config_flow::data::host%]",
|
||||||
"port": "[%key:common::config_flow::data::port%]"
|
"port": "[%key:common::config_flow::data::port%]"
|
||||||
|
},
|
||||||
|
"data_description": {
|
||||||
|
"host": "The hostname or IP address of the Atag device."
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
@@ -2,10 +2,12 @@
 from abc import abstractmethod
 
 from yalexs.doorbell import Doorbell
-from yalexs.lock import Lock
+from yalexs.lock import Lock, LockDetail
 from yalexs.util import get_configuration_url
 
+from homeassistant.const import ATTR_CONNECTIONS
 from homeassistant.core import callback
+from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity import Entity
 
@@ -26,15 +28,18 @@ class AugustEntityMixin(Entity):
         super().__init__()
         self._data = data
         self._device = device
+        detail = self._detail
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, self._device_id)},
             manufacturer=MANUFACTURER,
-            model=self._detail.model,
+            model=detail.model,
             name=device.device_name,
-            sw_version=self._detail.firmware_version,
+            sw_version=detail.firmware_version,
             suggested_area=_remove_device_types(device.device_name, DEVICE_TYPES),
             configuration_url=get_configuration_url(data.brand),
         )
+        if isinstance(detail, LockDetail) and (mac := detail.mac_address):
+            self._attr_device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_BLUETOOTH, mac)}
 
     @property
     def _device_id(self):
@ -12,13 +12,14 @@
|
|||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from aurorapy.client import AuroraSerialClient
|
from aurorapy.client import AuroraError, AuroraSerialClient, AuroraTimeoutError
|
||||||
|
|
||||||
from homeassistant.config_entries import ConfigEntry
|
from homeassistant.config_entries import ConfigEntry
|
||||||
from homeassistant.const import CONF_ADDRESS, CONF_PORT, Platform
|
from homeassistant.const import CONF_ADDRESS, CONF_PORT, Platform
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
|
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||||
|
|
||||||
from .const import DOMAIN
|
from .const import DOMAIN, SCAN_INTERVAL
|
||||||
|
|
||||||
PLATFORMS = [Platform.SENSOR]
|
PLATFORMS = [Platform.SENSOR]
|
||||||
|
|
||||||
@ -30,8 +31,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||||||
|
|
||||||
comport = entry.data[CONF_PORT]
|
comport = entry.data[CONF_PORT]
|
||||||
address = entry.data[CONF_ADDRESS]
|
address = entry.data[CONF_ADDRESS]
|
||||||
ser_client = AuroraSerialClient(address, comport, parity="N", timeout=1)
|
coordinator = AuroraAbbDataUpdateCoordinator(hass, comport, address)
|
||||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = ser_client
|
await coordinator.async_config_entry_first_refresh()
|
||||||
|
|
||||||
|
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
|
||||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
@ -47,3 +50,58 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||||||
hass.data[DOMAIN].pop(entry.entry_id)
|
hass.data[DOMAIN].pop(entry.entry_id)
|
||||||
|
|
||||||
return unload_ok
|
return unload_ok
|
||||||
|
|
||||||
|
|
||||||
|
class AuroraAbbDataUpdateCoordinator(DataUpdateCoordinator[dict[str, float]]):
|
||||||
|
"""Class to manage fetching AuroraAbbPowerone data."""
|
||||||
|
|
||||||
|
def __init__(self, hass: HomeAssistant, comport: str, address: int) -> None:
|
||||||
|
"""Initialize the data update coordinator."""
|
||||||
|
self.available_prev = False
|
||||||
|
self.available = False
|
||||||
|
self.client = AuroraSerialClient(address, comport, parity="N", timeout=1)
|
||||||
|
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL)
|
||||||
|
|
||||||
|
def _update_data(self) -> dict[str, float]:
|
||||||
|
"""Fetch new state data for the sensor.
|
||||||
|
|
||||||
|
This is the only function that should fetch new data for Home Assistant.
|
||||||
|
"""
|
||||||
|
data: dict[str, float] = {}
|
||||||
|
self.available_prev = self.available
|
||||||
|
try:
|
||||||
|
self.client.connect()
|
||||||
|
|
||||||
|
# read ADC channel 3 (grid power output)
|
||||||
|
power_watts = self.client.measure(3, True)
|
||||||
|
temperature_c = self.client.measure(21)
|
||||||
|
energy_wh = self.client.cumulated_energy(5)
|
||||||
|
except AuroraTimeoutError:
|
||||||
|
self.available = False
|
||||||
|
_LOGGER.debug("No response from inverter (could be dark)")
|
||||||
|
except AuroraError as error:
|
||||||
|
self.available = False
|
||||||
|
raise error
|
||||||
|
else:
|
||||||
|
data["instantaneouspower"] = round(power_watts, 1)
|
||||||
|
data["temp"] = round(temperature_c, 1)
|
||||||
|
data["totalenergy"] = round(energy_wh / 1000, 2)
|
||||||
|
self.available = True
|
||||||
|
|
||||||
|
finally:
|
||||||
|
if self.available != self.available_prev:
|
||||||
|
if self.available:
|
||||||
|
_LOGGER.info("Communication with %s back online", self.name)
|
||||||
|
else:
|
||||||
|
_LOGGER.warning(
|
||||||
|
"Communication with %s lost",
|
||||||
|
self.name,
|
||||||
|
)
|
||||||
|
if self.client.serline.isOpen():
|
||||||
|
self.client.close()
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
async def _async_update_data(self) -> dict[str, float]:
|
||||||
|
"""Update inverter data in the executor."""
|
||||||
|
return await self.hass.async_add_executor_job(self._update_data)
|
||||||
|
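Review note: a stripped-down stand-in for the coordinator pattern this integration now uses — one object runs the blocking serial reads in an executor and caches a dict that entities read from. MiniCoordinator and FakeSerialClient are toy names; Home Assistant's real DataUpdateCoordinator adds scheduling, listeners and error handling on top of this idea.

import asyncio
from typing import Any


class FakeSerialClient:
    def measure_power(self) -> float:  # stands in for aurorapy's blocking calls
        return 1234.5


class MiniCoordinator:
    def __init__(self, client: FakeSerialClient) -> None:
        self.client = client
        self.data: dict[str, Any] = {}

    def _update_data(self) -> dict[str, Any]:
        # Blocking serial I/O happens here, off the event loop.
        return {"instantaneouspower": round(self.client.measure_power(), 1)}

    async def async_refresh(self) -> None:
        self.data = await asyncio.get_running_loop().run_in_executor(
            None, self._update_data
        )


async def _demo() -> None:
    coordinator = MiniCoordinator(FakeSerialClient())
    await coordinator.async_refresh()
    print(coordinator.data["instantaneouspower"])  # 1234.5


asyncio.run(_demo())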
@ -1,57 +0,0 @@
|
|||||||
"""Top level class for AuroraABBPowerOneSolarPV inverters and sensors."""
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from collections.abc import Mapping
|
|
||||||
import logging
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from aurorapy.client import AuroraSerialClient
|
|
||||||
|
|
||||||
from homeassistant.helpers.device_registry import DeviceInfo
|
|
||||||
from homeassistant.helpers.entity import Entity
|
|
||||||
|
|
||||||
from .const import (
|
|
||||||
ATTR_DEVICE_NAME,
|
|
||||||
ATTR_FIRMWARE,
|
|
||||||
ATTR_MODEL,
|
|
||||||
ATTR_SERIAL_NUMBER,
|
|
||||||
DEFAULT_DEVICE_NAME,
|
|
||||||
DOMAIN,
|
|
||||||
MANUFACTURER,
|
|
||||||
)
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class AuroraEntity(Entity):
|
|
||||||
"""Representation of an Aurora ABB PowerOne device."""
|
|
||||||
|
|
||||||
def __init__(self, client: AuroraSerialClient, data: Mapping[str, Any]) -> None:
|
|
||||||
"""Initialise the basic device."""
|
|
||||||
self._data = data
|
|
||||||
self.type = "device"
|
|
||||||
self.client = client
|
|
||||||
self._available = True
|
|
||||||
|
|
||||||
@property
|
|
||||||
def unique_id(self) -> str | None:
|
|
||||||
"""Return the unique id for this device."""
|
|
||||||
if (serial := self._data.get(ATTR_SERIAL_NUMBER)) is None:
|
|
||||||
return None
|
|
||||||
return f"{serial}_{self.entity_description.key}"
|
|
||||||
|
|
||||||
@property
|
|
||||||
def available(self) -> bool:
|
|
||||||
"""Return True if entity is available."""
|
|
||||||
return self._available
|
|
||||||
|
|
||||||
@property
|
|
||||||
def device_info(self) -> DeviceInfo:
|
|
||||||
"""Return device specific attributes."""
|
|
||||||
return DeviceInfo(
|
|
||||||
identifiers={(DOMAIN, self._data[ATTR_SERIAL_NUMBER])},
|
|
||||||
manufacturer=MANUFACTURER,
|
|
||||||
model=self._data[ATTR_MODEL],
|
|
||||||
name=self._data.get(ATTR_DEVICE_NAME, DEFAULT_DEVICE_NAME),
|
|
||||||
sw_version=self._data[ATTR_FIRMWARE],
|
|
||||||
)
|
|
@@ -1,5 +1,7 @@
 """Constants for the Aurora ABB PowerOne integration."""
 
+from datetime import timedelta
+
 DOMAIN = "aurora_abb_powerone"
 
 # Min max addresses and default according to here:
@@ -8,6 +10,7 @@ DOMAIN = "aurora_abb_powerone"
 MIN_ADDRESS = 2
 MAX_ADDRESS = 63
 DEFAULT_ADDRESS = 2
+SCAN_INTERVAL = timedelta(seconds=30)
 
 DEFAULT_INTEGRATION_TITLE = "PhotoVoltaic Inverters"
 DEFAULT_DEVICE_NAME = "Solar Inverter"
@ -5,8 +5,6 @@ from collections.abc import Mapping
|
|||||||
import logging
|
import logging
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from aurorapy.client import AuroraError, AuroraSerialClient, AuroraTimeoutError
|
|
||||||
|
|
||||||
from homeassistant.components.sensor import (
|
from homeassistant.components.sensor import (
|
||||||
SensorDeviceClass,
|
SensorDeviceClass,
|
||||||
SensorEntity,
|
SensorEntity,
|
||||||
@ -21,10 +19,21 @@ from homeassistant.const import (
|
|||||||
UnitOfTemperature,
|
UnitOfTemperature,
|
||||||
)
|
)
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
|
from homeassistant.helpers.device_registry import DeviceInfo
|
||||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||||
|
from homeassistant.helpers.typing import StateType
|
||||||
|
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||||
|
|
||||||
from .aurora_device import AuroraEntity
|
from . import AuroraAbbDataUpdateCoordinator
|
||||||
from .const import DOMAIN
|
from .const import (
|
||||||
|
ATTR_DEVICE_NAME,
|
||||||
|
ATTR_FIRMWARE,
|
||||||
|
ATTR_MODEL,
|
||||||
|
ATTR_SERIAL_NUMBER,
|
||||||
|
DEFAULT_DEVICE_NAME,
|
||||||
|
DOMAIN,
|
||||||
|
MANUFACTURER,
|
||||||
|
)
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@ -61,70 +70,40 @@ async def async_setup_entry(
|
|||||||
"""Set up aurora_abb_powerone sensor based on a config entry."""
|
"""Set up aurora_abb_powerone sensor based on a config entry."""
|
||||||
entities = []
|
entities = []
|
||||||
|
|
||||||
client = hass.data[DOMAIN][config_entry.entry_id]
|
coordinator = hass.data[DOMAIN][config_entry.entry_id]
|
||||||
data = config_entry.data
|
data = config_entry.data
|
||||||
|
|
||||||
for sens in SENSOR_TYPES:
|
for sens in SENSOR_TYPES:
|
||||||
entities.append(AuroraSensor(client, data, sens))
|
entities.append(AuroraSensor(coordinator, data, sens))
|
||||||
|
|
||||||
_LOGGER.debug("async_setup_entry adding %d entities", len(entities))
|
_LOGGER.debug("async_setup_entry adding %d entities", len(entities))
|
||||||
async_add_entities(entities, True)
|
async_add_entities(entities, True)
|
||||||
|
|
||||||
|
|
||||||
class AuroraSensor(AuroraEntity, SensorEntity):
|
class AuroraSensor(CoordinatorEntity[AuroraAbbDataUpdateCoordinator], SensorEntity):
|
||||||
"""Representation of a Sensor on a Aurora ABB PowerOne Solar inverter."""
|
"""Representation of a Sensor on an Aurora ABB PowerOne Solar inverter."""
|
||||||
|
|
||||||
_attr_has_entity_name = True
|
_attr_has_entity_name = True
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
client: AuroraSerialClient,
|
coordinator: AuroraAbbDataUpdateCoordinator,
|
||||||
data: Mapping[str, Any],
|
data: Mapping[str, Any],
|
||||||
entity_description: SensorEntityDescription,
|
entity_description: SensorEntityDescription,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Initialize the sensor."""
|
"""Initialize the sensor."""
|
||||||
super().__init__(client, data)
|
super().__init__(coordinator)
|
||||||
self.entity_description = entity_description
|
self.entity_description = entity_description
|
||||||
self.available_prev = True
|
self._attr_unique_id = f"{data[ATTR_SERIAL_NUMBER]}_{entity_description.key}"
|
||||||
|
self._attr_device_info = DeviceInfo(
|
||||||
|
identifiers={(DOMAIN, data[ATTR_SERIAL_NUMBER])},
|
||||||
|
manufacturer=MANUFACTURER,
|
||||||
|
model=data[ATTR_MODEL],
|
||||||
|
name=data.get(ATTR_DEVICE_NAME, DEFAULT_DEVICE_NAME),
|
||||||
|
sw_version=data[ATTR_FIRMWARE],
|
||||||
|
)
|
||||||
|
|
||||||
def update(self) -> None:
|
@property
|
||||||
"""Fetch new state data for the sensor.
|
def native_value(self) -> StateType:
|
||||||
|
"""Get the value of the sensor from previously collected data."""
|
||||||
This is the only method that should fetch new data for Home Assistant.
|
return self.coordinator.data.get(self.entity_description.key)
|
||||||
"""
|
|
||||||
try:
|
|
||||||
self.available_prev = self._attr_available
|
|
||||||
self.client.connect()
|
|
||||||
if self.entity_description.key == "instantaneouspower":
|
|
||||||
# read ADC channel 3 (grid power output)
|
|
||||||
power_watts = self.client.measure(3, True)
|
|
||||||
self._attr_native_value = round(power_watts, 1)
|
|
||||||
elif self.entity_description.key == "temp":
|
|
||||||
temperature_c = self.client.measure(21)
|
|
||||||
self._attr_native_value = round(temperature_c, 1)
|
|
||||||
elif self.entity_description.key == "totalenergy":
|
|
||||||
energy_wh = self.client.cumulated_energy(5)
|
|
||||||
self._attr_native_value = round(energy_wh / 1000, 2)
|
|
||||||
self._attr_available = True
|
|
||||||
|
|
||||||
except AuroraTimeoutError:
|
|
||||||
self._attr_state = None
|
|
||||||
self._attr_native_value = None
|
|
||||||
self._attr_available = False
|
|
||||||
_LOGGER.debug("No response from inverter (could be dark)")
|
|
||||||
except AuroraError as error:
|
|
||||||
self._attr_state = None
|
|
||||||
self._attr_native_value = None
|
|
||||||
self._attr_available = False
|
|
||||||
raise error
|
|
||||||
finally:
|
|
||||||
if self._attr_available != self.available_prev:
|
|
||||||
if self._attr_available:
|
|
||||||
_LOGGER.info("Communication with %s back online", self.name)
|
|
||||||
else:
|
|
||||||
_LOGGER.warning(
|
|
||||||
"Communication with %s lost",
|
|
||||||
self.name,
|
|
||||||
)
|
|
||||||
if self.client.serline.isOpen():
|
|
||||||
self.client.close()
|
|
||||||
|
@@ -71,14 +71,14 @@ from __future__ import annotations
 from collections.abc import Callable
 from http import HTTPStatus
 from ipaddress import ip_address
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, cast

 from aiohttp import web
 import voluptuous as vol
 import voluptuous_serialize

 from homeassistant import data_entry_flow
-from homeassistant.auth import AuthManagerFlowManager
+from homeassistant.auth import AuthManagerFlowManager, InvalidAuthError
 from homeassistant.auth.models import Credentials
 from homeassistant.components import onboarding
 from homeassistant.components.http.auth import async_user_not_allowed_do_auth
@@ -90,10 +90,16 @@ from homeassistant.components.http.ban import (
 from homeassistant.components.http.data_validator import RequestDataValidator
 from homeassistant.components.http.view import HomeAssistantView
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers.network import is_cloud_connection
+from homeassistant.util.network import is_local

 from . import indieauth

 if TYPE_CHECKING:
+    from homeassistant.auth.providers.trusted_networks import (
+        TrustedNetworksAuthProvider,
+    )
+
     from . import StoreResultType


@@ -146,12 +152,61 @@ class AuthProvidersView(HomeAssistantView):
                 message_code="onboarding_required",
             )

-        return self.json(
-            [
-                {"name": provider.name, "id": provider.id, "type": provider.type}
-                for provider in hass.auth.auth_providers
-            ]
-        )
+        try:
+            remote_address = ip_address(request.remote)  # type: ignore[arg-type]
+        except ValueError:
+            return self.json_message(
+                message="Invalid remote IP",
+                status_code=HTTPStatus.BAD_REQUEST,
+                message_code="invalid_remote_ip",
+            )
+
+        cloud_connection = is_cloud_connection(hass)
+
+        providers = []
+        for provider in hass.auth.auth_providers:
+            additional_data = {}
+
+            if provider.type == "trusted_networks":
+                if cloud_connection:
+                    # Skip quickly as trusted networks are not available on cloud
+                    continue
+
+                try:
+                    cast("TrustedNetworksAuthProvider", provider).async_validate_access(
+                        remote_address
+                    )
+                except InvalidAuthError:
+                    # Not a trusted network, so we don't expose that trusted_network authenticator is setup
+                    continue
+            elif (
+                provider.type == "homeassistant"
+                and not cloud_connection
+                and is_local(remote_address)
+                and "person" in hass.config.components
+            ):
+                # We are local, return user id and username
+                users = await provider.store.async_get_users()
+                additional_data["users"] = {
+                    user.id: credentials.data["username"]
+                    for user in users
+                    for credentials in user.credentials
+                    if (
+                        credentials.auth_provider_type == provider.type
+                        and credentials.auth_provider_id == provider.id
+                    )
+                }
+
+            providers.append(
+                {
+                    "name": provider.name,
+                    "id": provider.id,
+                    "type": provider.type,
+                    **additional_data,
+                }
+            )
+
+        return self.json(providers)


 def _prepare_result_json(
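Based on the rewritten handler above, the providers list is no longer a fixed name/id/type mapping: trusted_networks entries are hidden from cloud or non-trusted requests, and the default homeassistant provider can carry a "users" map for local requests. A rough sketch of what a response body might look like after this change; the provider name, user id and username are made-up placeholders, not taken from this diff.

# Rough sketch of a possible /auth/providers response after this change.
# The provider name, user id and username below are made-up placeholders.
example_response = [
    {
        "name": "Home Assistant Local",
        "id": None,
        "type": "homeassistant",
        # Only included for local, non-cloud requests when the person
        # integration is loaded (see the handler above).
        "users": {"0123456789abcdef0123456789abcdef": "example_user"},
    }
]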
@@ -235,7 +290,7 @@ class LoginFlowBaseView(HomeAssistantView):
                 f"Login blocked: {user_access_error}", HTTPStatus.FORBIDDEN
             )

-        await process_success_login(request)
+        process_success_login(request)
         result["result"] = self._store_result(client_id, result_obj)

         return self.json(result)
@@ -31,5 +31,11 @@
         "invalid_code": "Invalid code, please try again."
       }
     }
+  },
+  "issues": {
+    "deprecated_legacy_api_password": {
+      "title": "The legacy API password is deprecated",
+      "description": "The legacy API password authentication provider is deprecated and will be removed. Please remove it from your YAML configuration and use the default Home Assistant authentication provider instead."
+    }
   }
 }
@@ -1,5 +1,6 @@
 """Helpers for automation integration."""
 from homeassistant.components import blueprint
+from homeassistant.const import SERVICE_RELOAD
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.singleton import singleton

@@ -15,8 +16,17 @@ def _blueprint_in_use(hass: HomeAssistant, blueprint_path: str) -> bool:
     return len(automations_with_blueprint(hass, blueprint_path)) > 0


+async def _reload_blueprint_automations(
+    hass: HomeAssistant, blueprint_path: str
+) -> None:
+    """Reload all automations that rely on a specific blueprint."""
+    await hass.services.async_call(DOMAIN, SERVICE_RELOAD)
+
+
 @singleton(DATA_BLUEPRINTS)
 @callback
 def async_get_blueprints(hass: HomeAssistant) -> blueprint.DomainBlueprints:
     """Get automation blueprints."""
-    return blueprint.DomainBlueprints(hass, DOMAIN, LOGGER, _blueprint_in_use)
+    return blueprint.DomainBlueprints(
+        hass, DOMAIN, LOGGER, _blueprint_in_use, _reload_blueprint_automations
+    )
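The updated factory above shows that blueprint.DomainBlueprints now receives a reload callback in addition to the in-use check. A hedged sketch of how another domain could wire the same pattern; the domain name and both callbacks are illustrative and not part of this diff.

# Illustrative sketch only: "example_domain" and these callbacks are hypothetical.
import logging

from homeassistant.components import blueprint
from homeassistant.const import SERVICE_RELOAD
from homeassistant.core import HomeAssistant

EXAMPLE_DOMAIN = "example_domain"
LOGGER = logging.getLogger(__name__)


def _blueprint_in_use(hass: HomeAssistant, blueprint_path: str) -> bool:
    """Return True if anything in this domain still uses the blueprint."""
    return False  # placeholder check


async def _reload_blueprint_items(hass: HomeAssistant, blueprint_path: str) -> None:
    """Reload everything in this domain that relies on a blueprint."""
    await hass.services.async_call(EXAMPLE_DOMAIN, SERVICE_RELOAD)


def get_blueprints(hass: HomeAssistant) -> blueprint.DomainBlueprints:
    """Create the blueprint manager, mirroring the automation helper above."""
    return blueprint.DomainBlueprints(
        hass, EXAMPLE_DOMAIN, LOGGER, _blueprint_in_use, _reload_blueprint_items
    )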
@@ -3,12 +3,16 @@
     "flow_title": "{name} ({host})",
     "step": {
       "user": {
-        "title": "Set up Axis device",
+        "description": "Set up an Axis device",
         "data": {
           "host": "[%key:common::config_flow::data::host%]",
           "username": "[%key:common::config_flow::data::username%]",
           "password": "[%key:common::config_flow::data::password%]",
           "port": "[%key:common::config_flow::data::port%]"
+        },
+        "data_description": {
+          "host": "The hostname or IP address of the Axis device.",
+          "username": "The user name you set up on your Axis device. It is recommended to create a user specifically for Home Assistant."
         }
       }
     },
@@ -93,8 +93,6 @@ class BAFFan(BAFEntity, FanEntity):

     async def async_set_preset_mode(self, preset_mode: str) -> None:
         """Set the preset mode of the fan."""
-        if preset_mode != PRESET_MODE_AUTO:
-            raise ValueError(f"Invalid preset mode: {preset_mode}")
         self._device.fan_mode = OffOnAuto.AUTO

     async def async_set_direction(self, direction: str) -> None:
@@ -47,31 +47,27 @@ class BalboaBinarySensorEntityDescription(
 ):
     """A class that describes Balboa binary sensor entities."""

-    # BalboaBinarySensorEntity does not support UNDEFINED or None,
-    # restrict the type to str.
-    name: str = ""
-

 FILTER_CYCLE_ICONS = ("mdi:sync", "mdi:sync-off")
 BINARY_SENSOR_DESCRIPTIONS = (
     BalboaBinarySensorEntityDescription(
-        key="filter_cycle_1",
-        name="Filter1",
+        key="Filter1",
+        translation_key="filter_1",
         device_class=BinarySensorDeviceClass.RUNNING,
         is_on_fn=lambda spa: spa.filter_cycle_1_running,
         on_off_icons=FILTER_CYCLE_ICONS,
     ),
     BalboaBinarySensorEntityDescription(
-        key="filter_cycle_2",
-        name="Filter2",
+        key="Filter2",
+        translation_key="filter_2",
         device_class=BinarySensorDeviceClass.RUNNING,
         is_on_fn=lambda spa: spa.filter_cycle_2_running,
         on_off_icons=FILTER_CYCLE_ICONS,
     ),
 )
 CIRCULATION_PUMP_DESCRIPTION = BalboaBinarySensorEntityDescription(
-    key="circulation_pump",
-    name="Circ Pump",
+    key="Circ Pump",
+    translation_key="circ_pump",
     device_class=BinarySensorDeviceClass.RUNNING,
     is_on_fn=lambda spa: (pump := spa.circulation_pump) is not None and pump.state > 0,
     on_off_icons=("mdi:pump", "mdi:pump-off"),
@@ -87,7 +83,7 @@ class BalboaBinarySensorEntity(BalboaEntity, BinarySensorEntity):
         self, spa: SpaClient, description: BalboaBinarySensorEntityDescription
     ) -> None:
         """Initialize a Balboa binary sensor entity."""
-        super().__init__(spa, description.name)
+        super().__init__(spa, description.key)
         self.entity_description = description

     @property
@@ -59,6 +59,7 @@ class BalboaClimateEntity(BalboaEntity, ClimateEntity):
         ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE
     )
     _attr_translation_key = DOMAIN
+    _attr_name = None

     def __init__(self, client: SpaClient) -> None:
         """Initialize the climate entity."""
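Setting _attr_name = None together with _attr_has_entity_name = True (set on the shared BalboaEntity base, see the next hunk) marks the climate entity as the main feature of its device, so it is shown under the device name itself instead of "device name + entity name". A minimal sketch of that convention; the class name is hypothetical.

# Minimal sketch of the has_entity_name naming convention (class name hypothetical).
from homeassistant.components.climate import ClimateEntity


class ExampleSpaClimate(ClimateEntity):
    """Main climate entity of an example spa device."""

    _attr_has_entity_name = True
    # None marks this entity as the main feature of the device, so it takes
    # the device name directly instead of getting an extra name suffix.
    _attr_name = None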
@@ -15,12 +15,11 @@ class BalboaEntity(Entity):
     _attr_should_poll = False
     _attr_has_entity_name = True

-    def __init__(self, client: SpaClient, name: str | None = None) -> None:
+    def __init__(self, client: SpaClient, key: str) -> None:
         """Initialize the control."""
         mac = client.mac_address
         model = client.model
-        self._attr_unique_id = f'{model}-{name}-{mac.replace(":","")[-6:]}'
-        self._attr_name = name
+        self._attr_unique_id = f'{model}-{key}-{mac.replace(":","")[-6:]}'
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, mac)},
             name=model,
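A short worked example of why the description keys earlier in this diff were changed to the old display names: the unique ID is now derived from the key, so reusing the previous name strings keeps existing unique IDs stable while the visible names move to translations. The model and MAC values below are made up.

# Made-up model and MAC address, for illustration only.
model = "BFBP20"
key = "Circ Pump"  # matches the previous entity name, see the descriptions above
mac = "00:15:27:ab:cd:ef"

unique_id = f'{model}-{key}-{mac.replace(":", "")[-6:]}'
print(unique_id)  # prints: BFBP20-Circ Pump-abcdef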
@@ -2,9 +2,12 @@
   "config": {
     "step": {
       "user": {
-        "title": "Connect to the Balboa Wi-Fi device",
+        "description": "Connect to the Balboa Wi-Fi device",
         "data": {
           "host": "[%key:common::config_flow::data::host%]"
+        },
+        "data_description": {
+          "host": "Hostname or IP address of your Balboa Spa Wifi Device. For example, 192.168.1.58."
         }
       }
     },
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"entity": {
|
"entity": {
|
||||||
|
"binary_sensor": {
|
||||||
|
"filter_1": {
|
||||||
|
"name": "Filter cycle 1"
|
||||||
|
},
|
||||||
|
"filter_2": {
|
||||||
|
"name": "Filter cycle 2"
|
||||||
|
},
|
||||||
|
"circ_pump": {
|
||||||
|
"name": "Circulation pump"
|
||||||
|
}
|
||||||
|
},
|
||||||
"climate": {
|
"climate": {
|
||||||
"balboa": {
|
"balboa": {
|
||||||
"state_attributes": {
|
"state_attributes": {
|
||||||
|
@@ -10,8 +10,9 @@ from typing import Literal, final
 import voluptuous as vol

 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import STATE_OFF, STATE_ON
+from homeassistant.const import STATE_OFF, STATE_ON, EntityCategory
 from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.config_validation import (  # noqa: F401
     PLATFORM_SCHEMA,
     PLATFORM_SCHEMA_BASE,
@@ -190,6 +191,14 @@ class BinarySensorEntity(Entity):
     _attr_is_on: bool | None = None
     _attr_state: None = None

+    async def async_internal_added_to_hass(self) -> None:
+        """Call when the binary sensor entity is added to hass."""
+        await super().async_internal_added_to_hass()
+        if self.entity_category == EntityCategory.CONFIG:
+            raise HomeAssistantError(
+                f"Entity {self.entity_id} cannot be added as the entity category is set to config"
+            )
+
     def _default_to_device_class_name(self) -> bool:
         """Return True if an unnamed entity should be named by its device class.

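The new async_internal_added_to_hass hook above rejects binary sensors declared with the config entity category, since binary sensors are read-only and EntityCategory.CONFIG is reserved for entities that change a device's configuration. A hypothetical example of a definition that would now raise HomeAssistantError when added:

# Hypothetical entity definition that the new check rejects.
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.const import EntityCategory


class MisconfiguredBinarySensor(BinarySensorEntity):
    """Binary sensor wrongly declared as a config entity."""

    # Adding this entity now raises HomeAssistantError in
    # async_internal_added_to_hass (see above).
    _attr_entity_category = EntityCategory.CONFIG
    _attr_is_on = False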
@@ -112,7 +112,7 @@ class BleBoxConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         self.device_config["name"] = product.name
         # Check if configured but IP changed since
         await self.async_set_unique_id(product.unique_id)
-        self._abort_if_unique_id_configured()
+        self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host})
         self.context.update(
             {
                 "title_placeholders": {
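Passing updates= to _abort_if_unique_id_configured means that when an already configured device is rediscovered under a new IP address, the existing entry's host is refreshed before the flow aborts instead of being left stale. A generic sketch of the same rediscovery pattern; the domain, unique ID and step layout are hypothetical.

# Generic sketch of the rediscovery pattern above; domain and unique ID are hypothetical.
from homeassistant.components import zeroconf
from homeassistant.config_entries import ConfigFlow
from homeassistant.const import CONF_HOST
from homeassistant.data_entry_flow import FlowResult


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Config flow that refreshes the stored host when a known device reappears."""

    async def async_step_zeroconf(
        self, discovery_info: zeroconf.ZeroconfServiceInfo
    ) -> FlowResult:
        """Handle zeroconf discovery."""
        await self.async_set_unique_id("example-serial-number")
        # Update the stored host on the existing entry (and abort) if this
        # device is already configured, so an IP change is picked up.
        self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host})
        return await self.async_step_confirm()

    async def async_step_confirm(self, user_input: dict | None = None) -> FlowResult:
        """Confirm adding the discovered device."""
        if user_input is not None:
            return self.async_create_entry(title="Example device", data={})
        return self.async_show_form(step_id="confirm")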
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user