Mirror of https://github.com/home-assistant/core.git (synced 2026-01-03 13:48:04 +00:00)

Compare commits: hassio_sta ... frontend-d (1 commit, SHA1 651312fb42)

@@ -125,7 +125,6 @@ tests: &tests
- tests/mock/**
- tests/pylint/**
- tests/scripts/**
- tests/syrupy.py
- tests/test_util/**
- tests/testing_config/**
- tests/util/**

.coveragerc (45 changes)
@@ -36,7 +36,6 @@ omit =
|
||||
homeassistant/components/airnow/__init__.py
|
||||
homeassistant/components/airnow/sensor.py
|
||||
homeassistant/components/airq/__init__.py
|
||||
homeassistant/components/airq/coordinator.py
|
||||
homeassistant/components/airq/sensor.py
|
||||
homeassistant/components/airthings/__init__.py
|
||||
homeassistant/components/airthings/sensor.py
|
||||
@@ -198,6 +197,7 @@ omit =
|
||||
homeassistant/components/denonavr/__init__.py
|
||||
homeassistant/components/denonavr/media_player.py
|
||||
homeassistant/components/denonavr/receiver.py
|
||||
homeassistant/components/devolo_home_control/switch.py
|
||||
homeassistant/components/digital_ocean/*
|
||||
homeassistant/components/discogs/sensor.py
|
||||
homeassistant/components/discord/__init__.py
|
||||
@@ -226,7 +226,6 @@ omit =
|
||||
homeassistant/components/dublin_bus_transport/sensor.py
|
||||
homeassistant/components/dunehd/__init__.py
|
||||
homeassistant/components/dunehd/media_player.py
|
||||
homeassistant/components/dwd_weather_warnings/const.py
|
||||
homeassistant/components/dwd_weather_warnings/sensor.py
|
||||
homeassistant/components/dweet/*
|
||||
homeassistant/components/ebox/sensor.py
|
||||
@@ -250,8 +249,7 @@ omit =
|
||||
homeassistant/components/ecowitt/sensor.py
|
||||
homeassistant/components/eddystone_temperature/sensor.py
|
||||
homeassistant/components/edimax/switch.py
|
||||
homeassistant/components/edl21/__init__.py
|
||||
homeassistant/components/edl21/sensor.py
|
||||
homeassistant/components/edl21/*
|
||||
homeassistant/components/egardia/*
|
||||
homeassistant/components/eight_sleep/__init__.py
|
||||
homeassistant/components/eight_sleep/binary_sensor.py
|
||||
@@ -396,8 +394,7 @@ omit =
|
||||
homeassistant/components/fritzbox_callmonitor/__init__.py
|
||||
homeassistant/components/fritzbox_callmonitor/base.py
|
||||
homeassistant/components/fritzbox_callmonitor/sensor.py
|
||||
homeassistant/components/frontier_silicon/__init__.py
|
||||
homeassistant/components/frontier_silicon/browse_media.py
|
||||
homeassistant/components/frontier_silicon/const.py
|
||||
homeassistant/components/frontier_silicon/media_player.py
|
||||
homeassistant/components/futurenow/light.py
|
||||
homeassistant/components/garadget/cover.py
|
||||
@@ -519,6 +516,9 @@ omit =
|
||||
homeassistant/components/ifttt/alarm_control_panel.py
|
||||
homeassistant/components/iglo/light.py
|
||||
homeassistant/components/ihc/*
|
||||
homeassistant/components/imap/__init__.py
|
||||
homeassistant/components/imap/coordinator.py
|
||||
homeassistant/components/imap/sensor.py
|
||||
homeassistant/components/imap_email_content/sensor.py
|
||||
homeassistant/components/incomfort/*
|
||||
homeassistant/components/insteon/binary_sensor.py
|
||||
@@ -627,6 +627,9 @@ omit =
|
||||
homeassistant/components/lg_netcast/media_player.py
|
||||
homeassistant/components/lg_soundbar/__init__.py
|
||||
homeassistant/components/lg_soundbar/media_player.py
|
||||
homeassistant/components/lidarr/__init__.py
|
||||
homeassistant/components/lidarr/coordinator.py
|
||||
homeassistant/components/lidarr/sensor.py
|
||||
homeassistant/components/life360/__init__.py
|
||||
homeassistant/components/life360/coordinator.py
|
||||
homeassistant/components/life360/device_tracker.py
|
||||
@@ -637,10 +640,8 @@ omit =
|
||||
homeassistant/components/linux_battery/sensor.py
|
||||
homeassistant/components/lirc/*
|
||||
homeassistant/components/livisi/__init__.py
|
||||
homeassistant/components/livisi/binary_sensor.py
|
||||
homeassistant/components/livisi/climate.py
|
||||
homeassistant/components/livisi/coordinator.py
|
||||
homeassistant/components/livisi/entity.py
|
||||
homeassistant/components/livisi/switch.py
|
||||
homeassistant/components/llamalab_automate/notify.py
|
||||
homeassistant/components/logi_circle/__init__.py
|
||||
@@ -673,6 +674,7 @@ omit =
|
||||
homeassistant/components/lyric/api.py
|
||||
homeassistant/components/lyric/climate.py
|
||||
homeassistant/components/lyric/sensor.py
|
||||
homeassistant/components/magicseaweed/sensor.py
|
||||
homeassistant/components/mailgun/notify.py
|
||||
homeassistant/components/map/*
|
||||
homeassistant/components/mastodon/notify.py
|
||||
@@ -773,11 +775,7 @@ omit =
|
||||
homeassistant/components/nexia/climate.py
|
||||
homeassistant/components/nexia/entity.py
|
||||
homeassistant/components/nexia/switch.py
|
||||
homeassistant/components/nextcloud/__init__.py
|
||||
homeassistant/components/nextcloud/binary_sensor.py
|
||||
homeassistant/components/nextcloud/coordinator.py
|
||||
homeassistant/components/nextcloud/entity.py
|
||||
homeassistant/components/nextcloud/sensor.py
|
||||
homeassistant/components/nextcloud/*
|
||||
homeassistant/components/nfandroidtv/__init__.py
|
||||
homeassistant/components/nfandroidtv/notify.py
|
||||
homeassistant/components/nibe_heatpump/__init__.py
|
||||
@@ -809,8 +807,6 @@ omit =
|
||||
homeassistant/components/nuki/sensor.py
|
||||
homeassistant/components/nx584/alarm_control_panel.py
|
||||
homeassistant/components/oasa_telematics/sensor.py
|
||||
homeassistant/components/obihai/__init__.py
|
||||
homeassistant/components/obihai/button.py
|
||||
homeassistant/components/obihai/connectivity.py
|
||||
homeassistant/components/obihai/sensor.py
|
||||
homeassistant/components/octoprint/__init__.py
|
||||
@@ -939,7 +935,6 @@ omit =
|
||||
homeassistant/components/pushover/notify.py
|
||||
homeassistant/components/pushsafer/notify.py
|
||||
homeassistant/components/pyload/sensor.py
|
||||
homeassistant/components/qbittorrent/__init__.py
|
||||
homeassistant/components/qbittorrent/sensor.py
|
||||
homeassistant/components/qnap/sensor.py
|
||||
homeassistant/components/qrcode/image_processing.py
|
||||
@@ -976,23 +971,18 @@ omit =
|
||||
homeassistant/components/rejseplanen/sensor.py
|
||||
homeassistant/components/remember_the_milk/__init__.py
|
||||
homeassistant/components/remote_rpi_gpio/*
|
||||
homeassistant/components/reolink/__init__.py
|
||||
homeassistant/components/reolink/binary_sensor.py
|
||||
homeassistant/components/reolink/button.py
|
||||
homeassistant/components/reolink/camera.py
|
||||
homeassistant/components/reolink/entity.py
|
||||
homeassistant/components/reolink/host.py
|
||||
homeassistant/components/reolink/light.py
|
||||
homeassistant/components/reolink/number.py
|
||||
homeassistant/components/reolink/select.py
|
||||
homeassistant/components/reolink/siren.py
|
||||
homeassistant/components/reolink/switch.py
|
||||
homeassistant/components/reolink/update.py
|
||||
homeassistant/components/repetier/__init__.py
|
||||
homeassistant/components/repetier/sensor.py
|
||||
homeassistant/components/rest/notify.py
|
||||
homeassistant/components/rest/switch.py
|
||||
homeassistant/components/ridwell/__init__.py
|
||||
homeassistant/components/ridwell/calendar.py
|
||||
homeassistant/components/ridwell/coordinator.py
|
||||
homeassistant/components/ridwell/switch.py
|
||||
homeassistant/components/ring/camera.py
|
||||
@@ -1102,9 +1092,7 @@ omit =
|
||||
homeassistant/components/sms/notify.py
|
||||
homeassistant/components/sms/sensor.py
|
||||
homeassistant/components/smtp/notify.py
|
||||
homeassistant/components/snapcast/__init__.py
|
||||
homeassistant/components/snapcast/media_player.py
|
||||
homeassistant/components/snapcast/server.py
|
||||
homeassistant/components/snapcast/*
|
||||
homeassistant/components/snmp/device_tracker.py
|
||||
homeassistant/components/snmp/sensor.py
|
||||
homeassistant/components/snmp/switch.py
|
||||
@@ -1293,11 +1281,9 @@ omit =
|
||||
homeassistant/components/touchline/climate.py
|
||||
homeassistant/components/tplink_lte/*
|
||||
homeassistant/components/tplink_omada/__init__.py
|
||||
homeassistant/components/tplink_omada/controller.py
|
||||
homeassistant/components/tplink_omada/coordinator.py
|
||||
homeassistant/components/tplink_omada/entity.py
|
||||
homeassistant/components/tplink_omada/switch.py
|
||||
homeassistant/components/tplink_omada/update.py
|
||||
homeassistant/components/traccar/device_tracker.py
|
||||
homeassistant/components/tractive/__init__.py
|
||||
homeassistant/components/tractive/binary_sensor.py
|
||||
@@ -1365,7 +1351,6 @@ omit =
|
||||
homeassistant/components/velbus/entity.py
|
||||
homeassistant/components/velbus/light.py
|
||||
homeassistant/components/velbus/sensor.py
|
||||
homeassistant/components/velbus/select.py
|
||||
homeassistant/components/velbus/switch.py
|
||||
homeassistant/components/velux/__init__.py
|
||||
homeassistant/components/velux/cover.py
|
||||
@@ -1383,6 +1368,7 @@ omit =
|
||||
homeassistant/components/verisure/sensor.py
|
||||
homeassistant/components/verisure/switch.py
|
||||
homeassistant/components/versasense/*
|
||||
homeassistant/components/vesync/__init__.py
|
||||
homeassistant/components/vesync/common.py
|
||||
homeassistant/components/vesync/fan.py
|
||||
homeassistant/components/vesync/light.py
|
||||
@@ -1439,6 +1425,7 @@ omit =
|
||||
homeassistant/components/xbox/media_player.py
|
||||
homeassistant/components/xbox/remote.py
|
||||
homeassistant/components/xbox/sensor.py
|
||||
homeassistant/components/xbox_live/sensor.py
|
||||
homeassistant/components/xeoma/camera.py
|
||||
homeassistant/components/xiaomi/camera.py
|
||||
homeassistant/components/xiaomi_aqara/__init__.py
|
||||
@@ -1511,7 +1498,7 @@ omit =
|
||||
homeassistant/components/zeversolar/coordinator.py
|
||||
homeassistant/components/zeversolar/entity.py
|
||||
homeassistant/components/zeversolar/sensor.py
|
||||
homeassistant/components/zha/websocket_api.py
|
||||
homeassistant/components/zha/api.py
|
||||
homeassistant/components/zha/core/channels/*
|
||||
homeassistant/components/zha/core/device.py
|
||||
homeassistant/components/zha/core/gateway.py
|
||||
@@ -20,6 +20,7 @@
"python.linting.enabled": true,
"python.linting.pylintEnabled": true,
"python.formatting.blackPath": "/usr/local/bin/black",
"python.linting.flake8Path": "/usr/local/bin/flake8",
"python.linting.pycodestylePath": "/usr/local/bin/pycodestyle",
"python.linting.pydocstylePath": "/usr/local/bin/pydocstyle",
"python.linting.mypyPath": "/usr/local/bin/mypy",
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 8 changes)

@@ -31,9 +31,9 @@ body:
label: What version of Home Assistant Core has the issue?
placeholder: core-
description: >
Can be found in: [Settings ⇒ System ⇒ Repairs ⇒ Three Dots in Upper Right ⇒ System information](https://my.home-assistant.io/redirect/system_health/).
Can be found in: [Settings -> About](https://my.home-assistant.io/redirect/info/).

[](https://my.home-assistant.io/redirect/system_health/)
[](https://my.home-assistant.io/redirect/info/)
- type: input
attributes:
label: What was the last working version of Home Assistant Core?
@@ -46,9 +46,9 @@ body:
attributes:
label: What type of installation are you running?
description: >
Can be found in: [Settings ⇒ System ⇒ Repairs ⇒ Three Dots in Upper Right ⇒ System information](https://my.home-assistant.io/redirect/system_health/).
Can be found in: [Settings -> System-> Repairs -> Three Dots in Upper Right -> System information](https://my.home-assistant.io/redirect/system_health/).

[](https://my.home-assistant.io/redirect/system_health/)
[](https://my.home-assistant.io/redirect/system_health/)
options:
- Home Assistant OS
- Home Assistant Container
.github/ISSUE_TEMPLATE/config.yml (vendored, 2 changes)

@@ -1,6 +1,6 @@
blank_issues_enabled: false
contact_links:
- name: Report a bug with the UI, Frontend or Dashboards
- name: Report a bug with the UI, Frontend or Lovelace
url: https://github.com/home-assistant/frontend/issues
about: This is the issue tracker for our backend. Please report issues with the UI in the frontend repository.
- name: Report incorrect or missing information on our website
.github/PULL_REQUEST_TEMPLATE.md (vendored, 2 changes)

@@ -59,7 +59,6 @@
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR.
- [ ] I have followed the [development checklist][dev-checklist]
- [ ] I have followed the [perfect PR recommendations][perfect-pr]
- [ ] The code has been formatted using Black (`black --fast homeassistant tests`)
- [ ] Tests have been added to verify that the new code works.
@@ -108,4 +107,3 @@ To help with the load of incoming pull requests:
[manifest-docs]: https://developers.home-assistant.io/docs/en/creating_integration_manifest.html
[quality-scale]: https://developers.home-assistant.io/docs/en/next/integration_quality_scale_index.html
[docs-repository]: https://github.com/home-assistant/home-assistant.io
[perfect-pr]: https://developers.home-assistant.io/docs/review-process/#creating-the-perfect-pr
17
.github/workflows/builder.yml
vendored
17
.github/workflows/builder.yml
vendored
@@ -24,7 +24,7 @@ jobs:
|
||||
publish: ${{ steps.version.outputs.publish }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -67,7 +67,7 @@ jobs:
|
||||
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v4.5.0
|
||||
@@ -105,7 +105,7 @@ jobs:
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
|
||||
- name: Download nightly wheels of frontend
|
||||
if: needs.init.outputs.channel == 'dev'
|
||||
@@ -198,7 +198,7 @@ jobs:
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build base image
|
||||
uses: home-assistant/builder@2023.03.0
|
||||
uses: home-assistant/builder@2022.11.0
|
||||
with:
|
||||
args: |
|
||||
$BUILD_ARGS \
|
||||
@@ -232,7 +232,6 @@ jobs:
|
||||
- khadas-vim3
|
||||
- odroid-c2
|
||||
- odroid-c4
|
||||
- odroid-m1
|
||||
- odroid-n2
|
||||
- odroid-xu
|
||||
- qemuarm
|
||||
@@ -249,7 +248,7 @@ jobs:
|
||||
- yellow
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
|
||||
- name: Set build additional args
|
||||
run: |
|
||||
@@ -276,7 +275,7 @@ jobs:
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build base image
|
||||
uses: home-assistant/builder@2023.03.0
|
||||
uses: home-assistant/builder@2022.11.0
|
||||
with:
|
||||
args: |
|
||||
$BUILD_ARGS \
|
||||
@@ -292,7 +291,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
|
||||
- name: Initialize git
|
||||
uses: home-assistant/actions/helpers/git-init@master
|
||||
@@ -331,7 +330,7 @@ jobs:
|
||||
- "homeassistant"
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
|
||||
- name: Login to DockerHub
|
||||
if: matrix.registry == 'homeassistant'
|
||||
|
||||
.github/workflows/ci.yaml (vendored, 161 changes)

@@ -31,7 +31,7 @@ env:
CACHE_VERSION: 5
PIP_CACHE_VERSION: 4
MYPY_CACHE_VERSION: 4
HA_SHORT_VERSION: 2023.5
HA_SHORT_VERSION: 2023.4
DEFAULT_PYTHON: "3.10"
ALL_PYTHON_VERSIONS: "['3.10', '3.11']"
# 10.3 is the oldest supported version
@@ -40,9 +40,7 @@ env:
# - 10.6.10 is the version currently shipped with the Add-on (as of 31 Jan 2023)
# 10.10 is the latest short-term-support
# - 10.10.3 is the latest (as of 6 Feb 2023)
# mysql 8.0.32 does not always behave the same as MariaDB
# and some queries that work on MariaDB do not work on MySQL
MARIADB_VERSIONS: "['mariadb:10.3.32','mariadb:10.6.10','mariadb:10.10.3','mysql:8.0.32']"
MARIADB_VERSIONS: "['mariadb:10.3.32','mariadb:10.6.10','mariadb:10.10.3']"
# 12 is the oldest supported version
# - 12.14 is the latest (as of 9 Feb 2023)
# 15 is the latest version
@@ -81,7 +79,7 @@ jobs:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Generate partial Python venv restore key
|
||||
id: generate_python_cache_key
|
||||
run: >-
|
||||
@@ -205,7 +203,7 @@ jobs:
|
||||
- info
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v4.5.0
|
||||
@@ -214,7 +212,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v3.3.1
|
||||
uses: actions/cache@v3.2.6
|
||||
with:
|
||||
path: venv
|
||||
key: >-
|
||||
@@ -229,10 +227,9 @@ jobs:
|
||||
pip install "$(cat requirements_test.txt | grep pre-commit)"
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache@v3.3.1
|
||||
uses: actions/cache@v3.2.6
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
lookup-only: true
|
||||
key: >-
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.info.outputs.pre-commit_cache_key }}
|
||||
@@ -250,7 +247,7 @@ jobs:
|
||||
- pre-commit
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v4.5.0
|
||||
id: python
|
||||
@@ -259,7 +256,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache/restore@v3.2.6
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -268,7 +265,7 @@ jobs:
|
||||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache/restore@v3.2.6
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
fail-on-cache-miss: true
|
||||
@@ -288,15 +285,15 @@ jobs:
shopt -s globstar
pre-commit run --hook-stage manual black --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure

lint-ruff:
name: Check ruff
lint-flake8:
name: Check flake8
runs-on: ubuntu-22.04
needs:
- info
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.5.0
id: python
@@ -305,7 +302,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
@@ -314,7 +311,56 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Register flake8 problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/flake8.json"
- name: Run flake8 (fully)
if: needs.info.outputs.test_full_suite == 'true'
run: |
. venv/bin/activate
pre-commit run --hook-stage manual flake8 --all-files
- name: Run flake8 (partially)
if: needs.info.outputs.test_full_suite == 'false'
shell: bash
run: |
. venv/bin/activate
shopt -s globstar
pre-commit run --hook-stage manual flake8 --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*}

lint-ruff:
name: Check ruff
runs-on: ubuntu-22.04
needs:
- info
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.5.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v3.2.6
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -345,7 +391,7 @@ jobs:
|
||||
- pre-commit
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v4.5.0
|
||||
id: python
|
||||
@@ -354,7 +400,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache/restore@v3.2.6
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -363,7 +409,7 @@ jobs:
|
||||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache/restore@v3.2.6
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
fail-on-cache-miss: true
|
||||
@@ -383,7 +429,7 @@ jobs:
|
||||
- pre-commit
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v4.5.0
|
||||
id: python
|
||||
@@ -392,7 +438,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache/restore@v3.2.6
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -401,7 +447,7 @@ jobs:
|
||||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache/restore@v3.2.6
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
fail-on-cache-miss: true
|
||||
@@ -409,6 +455,19 @@ jobs:
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.pre-commit_cache_key }}

- name: Run pyupgrade (fully)
if: needs.info.outputs.test_full_suite == 'true'
run: |
. venv/bin/activate
pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
- name: Run pyupgrade (partially)
if: needs.info.outputs.test_full_suite == 'false'
shell: bash
run: |
. venv/bin/activate
shopt -s globstar
pre-commit run --hook-stage manual pyupgrade --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure

- name: Register yamllint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/yamllint.json"
@@ -436,7 +495,6 @@ jobs:
|
||||
shell: bash
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
shopt -s globstar
|
||||
pre-commit run --hook-stage manual prettier --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*}
|
||||
|
||||
- name: Register check executables problem matcher
|
||||
@@ -490,7 +548,7 @@ jobs:
|
||||
python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v4.5.0
|
||||
@@ -504,16 +562,15 @@ jobs:
|
||||
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v3.3.1
|
||||
uses: actions/cache@v3.2.6
|
||||
with:
|
||||
path: venv
|
||||
lookup-only: true
|
||||
key: >-
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.info.outputs.python_cache_key }}
|
||||
- name: Restore pip wheel cache
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
uses: actions/cache@v3.3.1
|
||||
uses: actions/cache@v3.2.6
|
||||
with:
|
||||
path: ${{ env.PIP_CACHE }}
|
||||
key: >-
|
||||
@@ -558,7 +615,7 @@ jobs:
|
||||
- base
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v4.5.0
|
||||
@@ -567,7 +624,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache/restore@v3.2.6
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -590,7 +647,7 @@ jobs:
|
||||
- base
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v4.5.0
|
||||
@@ -599,7 +656,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache/restore@v3.2.6
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -623,7 +680,7 @@ jobs:
|
||||
- base
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v4.5.0
|
||||
@@ -632,7 +689,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache/restore@v3.2.6
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -667,7 +724,7 @@ jobs:
|
||||
- base
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v4.5.0
|
||||
@@ -683,7 +740,7 @@ jobs:
|
||||
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache/restore@v3.2.6
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -691,7 +748,7 @@ jobs:
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.info.outputs.python_cache_key }}
|
||||
- name: Restore mypy cache
|
||||
uses: actions/cache@v3.3.1
|
||||
uses: actions/cache@v3.2.6
|
||||
with:
|
||||
path: .mypy_cache
|
||||
key: >-
|
||||
@@ -733,7 +790,7 @@ jobs:
|
||||
name: Run pip check ${{ matrix.python-version }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v4.5.0
|
||||
@@ -742,7 +799,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache/restore@v3.2.6
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -786,7 +843,7 @@ jobs:
|
||||
bluez \
|
||||
ffmpeg
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v4.5.0
|
||||
@@ -795,7 +852,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache/restore@v3.2.6
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -912,7 +969,7 @@ jobs:
|
||||
ffmpeg \
|
||||
libmariadb-dev-compat
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v4.5.0
|
||||
@@ -921,7 +978,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache/restore@v3.2.6
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -944,10 +1001,6 @@ jobs:
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pip install mysqlclient sqlalchemy_utils
|
||||
- name: Compile English translations
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
python3 -m script.translations develop --all
|
||||
- name: Run pytest (partially)
|
||||
timeout-minutes: 20
|
||||
shell: bash
|
||||
@@ -1020,7 +1073,7 @@ jobs:
|
||||
ffmpeg \
|
||||
postgresql-server-dev-14
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v4.5.0
|
||||
@@ -1029,7 +1082,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v3.3.1
|
||||
uses: actions/cache@v3.2.6
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -1052,10 +1105,6 @@ jobs:
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pip install psycopg2 sqlalchemy_utils
|
||||
- name: Compile English translations
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
python3 -m script.translations develop --all
|
||||
- name: Run pytest (partially)
|
||||
timeout-minutes: 20
|
||||
shell: bash
|
||||
@@ -1094,20 +1143,16 @@ jobs:
|
||||
needs:
|
||||
- info
|
||||
- pytest
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@v3
|
||||
- name: Upload coverage to Codecov (full coverage)
|
||||
if: needs.info.outputs.test_full_suite == 'true'
|
||||
uses: codecov/codecov-action@v3.1.1
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
flags: full-suite
|
||||
- name: Upload coverage to Codecov (partial coverage)
|
||||
if: needs.info.outputs.test_full_suite == 'false'
|
||||
uses: codecov/codecov-action@v3.1.1
|
||||
with:
|
||||
fail_ci_if_error: true
|
.github/workflows/matchers/flake8.json (vendored, new file, 30 lines)

@@ -0,0 +1,30 @@
{
  "problemMatcher": [
    {
      "owner": "flake8-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s([EF]\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    },
    {
      "owner": "flake8-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDNW]\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    }
  ]
}
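For reference, a minimal sketch (not part of the diff) of how the "flake8-error" regexp above lines up with a typical flake8 output line; the sample path and message are made up for illustration:

```python
import re

# Same pattern string as the "flake8-error" matcher above.
FLAKE8_ERROR = re.compile(r"^(.*):(\d+):(\d+):\s([EF]\d{3}\s.*)$")

# Hypothetical flake8 output line, invented for illustration.
sample = "homeassistant/components/demo/light.py:42:80: E501 line too long (88 > 79 characters)"

match = FLAKE8_ERROR.match(sample)
if match:
    file, line, column, message = match.groups()
    # GitHub uses the numbered capture groups to annotate the file at line:column.
    print(f"{file} ({line}:{column}) -> {message}")
```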
6
.github/workflows/stale.yml
vendored
6
.github/workflows/stale.yml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
# - No PRs marked as no-stale
|
||||
# - No issues (-1)
|
||||
- name: 90 days stale PRs policy
|
||||
uses: actions/stale@v8.0.0
|
||||
uses: actions/stale@v7.0.0
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
days-before-stale: 90
|
||||
@@ -53,7 +53,7 @@ jobs:
|
||||
# - No issues marked as no-stale or help-wanted
|
||||
# - No PRs (-1)
|
||||
- name: 90 days stale issues
|
||||
uses: actions/stale@v8.0.0
|
||||
uses: actions/stale@v7.0.0
|
||||
with:
|
||||
repo-token: ${{ steps.token.outputs.token }}
|
||||
days-before-stale: 90
|
||||
@@ -83,7 +83,7 @@ jobs:
|
||||
# - No Issues marked as no-stale or help-wanted
|
||||
# - No PRs (-1)
|
||||
- name: Needs more information stale issues policy
|
||||
uses: actions/stale@v8.0.0
|
||||
uses: actions/stale@v7.0.0
|
||||
with:
|
||||
repo-token: ${{ steps.token.outputs.token }}
|
||||
only-labels: "needs-more-information"
|
||||
|
||||
2
.github/workflows/translations.yml
vendored
2
.github/workflows/translations.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v4.5.0
|
||||
|
||||
13
.github/workflows/wheels.yml
vendored
13
.github/workflows/wheels.yml
vendored
@@ -22,7 +22,7 @@ jobs:
|
||||
architectures: ${{ steps.info.outputs.architectures }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
|
||||
- name: Get information
|
||||
id: info
|
||||
@@ -54,9 +54,6 @@ jobs:
|
||||
# OpenCV headless installation
|
||||
echo "CI_BUILD=1"
|
||||
echo "ENABLE_HEADLESS=1"
|
||||
|
||||
# Use C-Extension for sqlalchemy
|
||||
echo "REQUIRE_SQLALCHEMY_CEXT=1"
|
||||
) > .env_file
|
||||
|
||||
- name: Upload env_file
|
||||
@@ -82,7 +79,7 @@ jobs:
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
|
||||
- name: Download env_file
|
||||
uses: actions/download-artifact@v3
|
||||
@@ -119,7 +116,7 @@ jobs:
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v3.5.0
|
||||
uses: actions/checkout@v3.3.0
|
||||
|
||||
- name: Download env_file
|
||||
uses: actions/download-artifact@v3
|
||||
@@ -179,7 +176,7 @@ jobs:
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "libexecinfo-dev;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
|
||||
skip-binary: aiohttp;grpcio;sqlalchemy
|
||||
skip-binary: aiohttp;grpcio
|
||||
legacy: true
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
@@ -194,7 +191,7 @@ jobs:
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "libexecinfo-dev;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
|
||||
skip-binary: aiohttp;grpcio;sqlalchemy
|
||||
skip-binary: aiohttp;grpcio
|
||||
legacy: true
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
|
||||
@@ -1,12 +1,26 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.260
rev: v0.0.247
hooks:
- id: ruff
args:
- --fix
- repo: https://github.com/asottile/pyupgrade
rev: v3.3.1
hooks:
- id: pyupgrade
args: [--py310-plus]
stages: [manual]
- repo: https://github.com/PyCQA/autoflake
rev: v2.0.0
hooks:
- id: autoflake
args:
- --in-place
- --remove-all-unused-imports
stages: [manual]
- repo: https://github.com/psf/black
rev: 23.3.0
rev: 23.1.0
hooks:
- id: black
args:
@@ -22,6 +36,20 @@ repos:
- --quiet-level=2
exclude_types: [csv, json]
exclude: ^tests/fixtures/|homeassistant/generated/
- repo: https://github.com/PyCQA/flake8
rev: 6.0.0
hooks:
- id: flake8
additional_dependencies:
- pycodestyle==2.10.0
- pyflakes==3.0.1
- flake8-docstrings==1.6.0
- pydocstyle==6.2.3
- flake8-comprehensions==3.10.1
- flake8-noqa==1.3.0
- mccabe==0.7.0
exclude: docs/source/conf.py
stages: [manual]
- repo: https://github.com/PyCQA/bandit
rev: 1.7.4
hooks:
@@ -4,5 +4,3 @@ azure-*.yml
docs/source/_templates/*
homeassistant/components/*/translations/*.json
homeassistant/generated/*
tests/components/lidarr/fixtures/initialize.js
tests/components/lidarr/fixtures/initialize-wrong.js
@@ -137,7 +137,6 @@ homeassistant.components.hardkernel.*
homeassistant.components.hardware.*
homeassistant.components.here_travel_time.*
homeassistant.components.history.*
homeassistant.components.homeassistant.exposed_entities
homeassistant.components.homeassistant.triggers.event
homeassistant.components.homeassistant_alerts.*
homeassistant.components.homeassistant_hardware.*
@@ -298,7 +297,6 @@ homeassistant.components.tag.*
homeassistant.components.tailscale.*
homeassistant.components.tautulli.*
homeassistant.components.tcp.*
homeassistant.components.threshold.*
homeassistant.components.tibber.*
homeassistant.components.tile.*
homeassistant.components.tilt_ble.*
@@ -312,7 +310,7 @@ homeassistant.components.trafikverket_train.*
homeassistant.components.trafikverket_weatherstation.*
homeassistant.components.tts.*
homeassistant.components.twentemilieu.*
homeassistant.components.unifi.*
homeassistant.components.unifi.update
homeassistant.components.unifiprotect.*
homeassistant.components.upcloud.*
homeassistant.components.update.*
.vscode/tasks.json (vendored, 14 changes)

@@ -42,6 +42,20 @@
},
"problemMatcher": []
},
{
"label": "Flake8",
"type": "shell",
"command": "pre-commit run flake8 --all-files",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Ruff",
"type": "shell",
@@ -25,7 +25,7 @@ rules:
comments:
level: error
require-starting-space: true
min-spaces-from-content: 1
min-spaces-from-content: 2
comments-indentation:
level: error
document-end:
CODEOWNERS (44 changes)
@@ -80,8 +80,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/android_ip_webcam/ @engrbm87
|
||||
/homeassistant/components/androidtv/ @JeffLIrion @ollo69
|
||||
/tests/components/androidtv/ @JeffLIrion @ollo69
|
||||
/homeassistant/components/androidtv_remote/ @tronikos
|
||||
/tests/components/androidtv_remote/ @tronikos
|
||||
/homeassistant/components/anthemav/ @hyralex
|
||||
/tests/components/anthemav/ @hyralex
|
||||
/homeassistant/components/apache_kafka/ @bachya
|
||||
@@ -217,6 +215,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/conversation/ @home-assistant/core @synesthesiam
|
||||
/homeassistant/components/coolmaster/ @OnFreund
|
||||
/tests/components/coolmaster/ @OnFreund
|
||||
/homeassistant/components/coronavirus/ @home-assistant/core
|
||||
/tests/components/coronavirus/ @home-assistant/core
|
||||
/homeassistant/components/counter/ @fabaff
|
||||
/tests/components/counter/ @fabaff
|
||||
/homeassistant/components/cover/ @home-assistant/core
|
||||
@@ -228,6 +228,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/cups/ @fabaff
|
||||
/homeassistant/components/daikin/ @fredrike
|
||||
/tests/components/daikin/ @fredrike
|
||||
/homeassistant/components/darksky/ @fabaff
|
||||
/tests/components/darksky/ @fabaff
|
||||
/homeassistant/components/debugpy/ @frenck
|
||||
/tests/components/debugpy/ @frenck
|
||||
/homeassistant/components/deconz/ @Kane610
|
||||
@@ -281,7 +283,7 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/dsmr_reader/ @depl0y @glodenox
|
||||
/homeassistant/components/dunehd/ @bieniu
|
||||
/tests/components/dunehd/ @bieniu
|
||||
/homeassistant/components/dwd_weather_warnings/ @runningman84 @stephan192 @Hummel95 @andarotajo
|
||||
/homeassistant/components/dwd_weather_warnings/ @runningman84 @stephan192 @Hummel95
|
||||
/homeassistant/components/dynalite/ @ziv1234
|
||||
/tests/components/dynalite/ @ziv1234
|
||||
/homeassistant/components/eafm/ @Jc2k
|
||||
@@ -399,7 +401,6 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/frontend/ @home-assistant/frontend
|
||||
/tests/components/frontend/ @home-assistant/frontend
|
||||
/homeassistant/components/frontier_silicon/ @wlcrs
|
||||
/tests/components/frontier_silicon/ @wlcrs
|
||||
/homeassistant/components/fully_kiosk/ @cgarwood
|
||||
/tests/components/fully_kiosk/ @cgarwood
|
||||
/homeassistant/components/garages_amsterdam/ @klaasnicolaas
|
||||
@@ -658,8 +659,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/litejet/ @joncar
|
||||
/homeassistant/components/litterrobot/ @natekspencer @tkdrob
|
||||
/tests/components/litterrobot/ @natekspencer @tkdrob
|
||||
/homeassistant/components/livisi/ @StefanIacobLivisi @planbnet
|
||||
/tests/components/livisi/ @StefanIacobLivisi @planbnet
|
||||
/homeassistant/components/livisi/ @StefanIacobLivisi
|
||||
/tests/components/livisi/ @StefanIacobLivisi
|
||||
/homeassistant/components/local_calendar/ @allenporter
|
||||
/tests/components/local_calendar/ @allenporter
|
||||
/homeassistant/components/local_ip/ @issacg
|
||||
@@ -783,8 +784,7 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/nexia/ @bdraco
|
||||
/homeassistant/components/nextbus/ @vividboarder
|
||||
/tests/components/nextbus/ @vividboarder
|
||||
/homeassistant/components/nextcloud/ @mib1185
|
||||
/tests/components/nextcloud/ @mib1185
|
||||
/homeassistant/components/nextcloud/ @meichthys
|
||||
/homeassistant/components/nextdns/ @bieniu
|
||||
/tests/components/nextdns/ @bieniu
|
||||
/homeassistant/components/nfandroidtv/ @tkdrob
|
||||
@@ -931,7 +931,6 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/pvpc_hourly_pricing/ @azogue
|
||||
/tests/components/pvpc_hourly_pricing/ @azogue
|
||||
/homeassistant/components/qbittorrent/ @geoffreylagaisse
|
||||
/tests/components/qbittorrent/ @geoffreylagaisse
|
||||
/homeassistant/components/qingping/ @bdraco @skgsergio
|
||||
/tests/components/qingping/ @bdraco @skgsergio
|
||||
/homeassistant/components/qld_bushfire/ @exxamalte
|
||||
@@ -977,8 +976,6 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/repairs/ @home-assistant/core
|
||||
/tests/components/repairs/ @home-assistant/core
|
||||
/homeassistant/components/repetier/ @MTrab @ShadowBr0ther
|
||||
/homeassistant/components/rest/ @epenet
|
||||
/tests/components/rest/ @epenet
|
||||
/homeassistant/components/rflink/ @javicalle
|
||||
/tests/components/rflink/ @javicalle
|
||||
/homeassistant/components/rfxtrx/ @danielhiversen @elupus @RobBie1221
|
||||
@@ -1059,8 +1056,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/seven_segments/ @fabaff
|
||||
/homeassistant/components/sfr_box/ @epenet
|
||||
/tests/components/sfr_box/ @epenet
|
||||
/homeassistant/components/sharkiq/ @JeffResc @funkybunch
|
||||
/tests/components/sharkiq/ @JeffResc @funkybunch
|
||||
/homeassistant/components/sharkiq/ @JeffResc @funkybunch @AritroSaha10
|
||||
/tests/components/sharkiq/ @JeffResc @funkybunch @AritroSaha10
|
||||
/homeassistant/components/shell_command/ @home-assistant/core
|
||||
/tests/components/shell_command/ @home-assistant/core
|
||||
/homeassistant/components/shelly/ @balloob @bieniu @thecode @chemelli74 @bdraco
|
||||
@@ -1104,8 +1101,6 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/smhi/ @gjohansson-ST
|
||||
/tests/components/smhi/ @gjohansson-ST
|
||||
/homeassistant/components/sms/ @ocalvo
|
||||
/homeassistant/components/snapcast/ @luar123
|
||||
/tests/components/snapcast/ @luar123
|
||||
/homeassistant/components/snooz/ @AustinBrunkhorst
|
||||
/tests/components/snooz/ @AustinBrunkhorst
|
||||
/homeassistant/components/solaredge/ @frenck
|
||||
@@ -1157,8 +1152,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/stookwijzer/ @fwestenberg
|
||||
/homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
|
||||
/tests/components/stream/ @hunterjm @uvjustin @allenporter
|
||||
/homeassistant/components/stt/ @home-assistant/core @pvizeli
|
||||
/tests/components/stt/ @home-assistant/core @pvizeli
|
||||
/homeassistant/components/stt/ @pvizeli
|
||||
/tests/components/stt/ @pvizeli
|
||||
/homeassistant/components/subaru/ @G-Two
|
||||
/tests/components/subaru/ @G-Two
|
||||
/homeassistant/components/suez_water/ @ooii
|
||||
@@ -1218,6 +1213,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/thethingsnetwork/ @fabaff
|
||||
/homeassistant/components/thread/ @home-assistant/core
|
||||
/tests/components/thread/ @home-assistant/core
|
||||
/homeassistant/components/threshold/ @fabaff
|
||||
/tests/components/threshold/ @fabaff
|
||||
/homeassistant/components/tibber/ @danielhiversen
|
||||
/tests/components/tibber/ @danielhiversen
|
||||
/homeassistant/components/tile/ @bachya
|
||||
@@ -1296,13 +1293,15 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/velux/ @Julius2342
|
||||
/homeassistant/components/venstar/ @garbled1
|
||||
/tests/components/venstar/ @garbled1
|
||||
/homeassistant/components/verisure/ @frenck @niro1987
|
||||
/tests/components/verisure/ @frenck @niro1987
|
||||
/homeassistant/components/verisure/ @frenck
|
||||
/tests/components/verisure/ @frenck
|
||||
/homeassistant/components/versasense/ @flamm3blemuff1n
|
||||
/homeassistant/components/version/ @ludeeus
|
||||
/tests/components/version/ @ludeeus
|
||||
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey
|
||||
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey
|
||||
/homeassistant/components/vicare/ @oischinger
|
||||
/tests/components/vicare/ @oischinger
|
||||
/homeassistant/components/vilfo/ @ManneW
|
||||
/tests/components/vilfo/ @ManneW
|
||||
/homeassistant/components/vivotek/ @HarlemSquirrel
|
||||
@@ -1310,8 +1309,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/vizio/ @raman325
|
||||
/homeassistant/components/vlc_telnet/ @rodripf @MartinHjelmare
|
||||
/tests/components/vlc_telnet/ @rodripf @MartinHjelmare
|
||||
/homeassistant/components/voice_assistant/ @balloob @synesthesiam
|
||||
/tests/components/voice_assistant/ @balloob @synesthesiam
|
||||
/homeassistant/components/volumio/ @OnFreund
|
||||
/tests/components/volumio/ @OnFreund
|
||||
/homeassistant/components/volvooncall/ @molobrakos
|
||||
@@ -1357,14 +1354,15 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/wled/ @frenck
|
||||
/homeassistant/components/wolflink/ @adamkrol93
|
||||
/tests/components/wolflink/ @adamkrol93
|
||||
/homeassistant/components/workday/ @fabaff @gjohansson-ST
|
||||
/tests/components/workday/ @fabaff @gjohansson-ST
|
||||
/homeassistant/components/workday/ @fabaff
|
||||
/tests/components/workday/ @fabaff
|
||||
/homeassistant/components/worldclock/ @fabaff
|
||||
/tests/components/worldclock/ @fabaff
|
||||
/homeassistant/components/ws66i/ @ssaenger
|
||||
/tests/components/ws66i/ @ssaenger
|
||||
/homeassistant/components/xbox/ @hunterjm
|
||||
/tests/components/xbox/ @hunterjm
|
||||
/homeassistant/components/xbox_live/ @MartinHjelmare
|
||||
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
|
||||
/tests/components/xiaomi_aqara/ @danielhiversen @syssi
|
||||
/homeassistant/components/xiaomi_ble/ @Jc2k @Ernst79
|
||||
|
||||
@@ -5,6 +5,7 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# Uninstall pre-installed formatting and linting tools
# They would conflict with our pinned versions
RUN pipx uninstall black
RUN pipx uninstall flake8
RUN pipx uninstall pydocstyle
RUN pipx uninstall pycodestyle
RUN pipx uninstall mypy
@@ -4,7 +4,7 @@ Home Assistant |Chat Status|
Open source home automation that puts local control and privacy first. Powered by a worldwide community of tinkerers and DIY enthusiasts. Perfect to run on a Raspberry Pi or a local server.

Check out `home-assistant.io <https://home-assistant.io>`__ for `a
demo <https://demo.home-assistant.io>`__, `installation instructions <https://home-assistant.io/getting-started/>`__,
demo <https://home-assistant.io/demo/>`__, `installation instructions <https://home-assistant.io/getting-started/>`__,
`tutorials <https://home-assistant.io/getting-started/automation/>`__ and `documentation <https://home-assistant.io/docs/>`__.

|screenshot-states|
@@ -23,6 +23,6 @@ of a component, check the `Home Assistant help section <https://home-assistant.i
.. |Chat Status| image:: https://img.shields.io/discord/330944238910963714.svg
:target: https://discord.gg/c5DvZ4e
.. |screenshot-states| image:: https://raw.githubusercontent.com/home-assistant/core/master/docs/screenshots.png
:target: https://demo.home-assistant.io
:target: https://home-assistant.io/demo/
.. |screenshot-integrations| image:: https://raw.githubusercontent.com/home-assistant/core/dev/docs/screenshot-integrations.png
:target: https://home-assistant.io/integrations/
@@ -14,7 +14,7 @@ from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.util import dt as dt_util

from . import auth_store, jwt_wrapper, models
from . import auth_store, models
from .const import ACCESS_TOKEN_EXPIRATION, GROUP_ID_ADMIN
from .mfa_modules import MultiFactorAuthModule, auth_mfa_module_from_config
from .providers import AuthProvider, LoginFlow, auth_provider_from_config
@@ -555,7 +555,9 @@ class AuthManager:
    ) -> models.RefreshToken | None:
        """Return refresh token if an access token is valid."""
        try:
            unverif_claims = jwt_wrapper.unverified_hs256_token_decode(token)
            unverif_claims = jwt.decode(
                token, algorithms=["HS256"], options={"verify_signature": False}
            )
        except jwt.InvalidTokenError:
            return None

@@ -571,9 +573,7 @@ class AuthManager:
        issuer = refresh_token.id

        try:
            jwt_wrapper.verify_and_decode(
                token, jwt_key, leeway=10, issuer=issuer, algorithms=["HS256"]
            )
            jwt.decode(token, jwt_key, leeway=10, issuer=issuer, algorithms=["HS256"])
        except jwt.InvalidTokenError:
            return None
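The two call sites above differ only in whether signature and claim checks run. A minimal, self-contained sketch of that distinction using plain PyJWT (assuming PyJWT 2.x; the key and issuer values are made-up placeholders, not values from the diff):

```python
import time

import jwt  # PyJWT

# Placeholder key and issuer for illustration; in the diff the issuer is the refresh token id.
jwt_key = "secret"
issuer = "example-issuer"
token = jwt.encode(
    {"iss": issuer, "iat": int(time.time()), "exp": int(time.time()) + 300},
    jwt_key,
    algorithm="HS256",
)

# Unverified decode: only reads the claims, nothing is checked.
unverif_claims = jwt.decode(
    token, algorithms=["HS256"], options={"verify_signature": False}
)

# Verified decode: checks the signature, expiry (with 10 s leeway) and issuer,
# raising jwt.InvalidTokenError if any check fails.
claims = jwt.decode(token, jwt_key, leeway=10, issuer=issuer, algorithms=["HS256"])
print(unverif_claims == claims)  # True for a valid token
```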
@@ -1,116 +0,0 @@
"""Provide a wrapper around JWT that caches decoding tokens.

Since we decode the same tokens over and over again
we can cache the result of the decode of valid tokens
to speed up the process.
"""
from __future__ import annotations

from datetime import timedelta
from functools import lru_cache, partial
from typing import Any

from jwt import DecodeError, PyJWS, PyJWT

from homeassistant.util.json import json_loads

JWT_TOKEN_CACHE_SIZE = 16
MAX_TOKEN_SIZE = 8192

_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss")

_VERIFY_OPTIONS: dict[str, Any] = {f"verify_{key}": True for key in _VERIFY_KEYS} | {
    "require": []
}
_NO_VERIFY_OPTIONS = {f"verify_{key}": False for key in _VERIFY_KEYS}


class _PyJWSWithLoadCache(PyJWS):
    """PyJWS with a dedicated load implementation."""

    @lru_cache(maxsize=JWT_TOKEN_CACHE_SIZE)
    # We only ever have a global instance of this class
    # so we do not have to worry about the LRU growing
    # each time we create a new instance.
    def _load(self, jwt: str | bytes) -> tuple[bytes, bytes, dict, bytes]:
        """Load a JWS."""
        return super()._load(jwt)


_jws = _PyJWSWithLoadCache()


@lru_cache(maxsize=JWT_TOKEN_CACHE_SIZE)
def _decode_payload(json_payload: str) -> dict[str, Any]:
    """Decode the payload from a JWS dictionary."""
    try:
        payload = json_loads(json_payload)
    except ValueError as err:
        raise DecodeError(f"Invalid payload string: {err}") from err
    if not isinstance(payload, dict):
        raise DecodeError("Invalid payload string: must be a json object")
    return payload


class _PyJWTWithVerify(PyJWT):
    """PyJWT with a fast decode implementation."""

    def decode_payload(
        self, jwt: str, key: str, options: dict[str, Any], algorithms: list[str]
    ) -> dict[str, Any]:
        """Decode a JWT's payload."""
        if len(jwt) > MAX_TOKEN_SIZE:
            # Avoid caching impossible tokens
            raise DecodeError("Token too large")
        return _decode_payload(
            _jws.decode_complete(
                jwt=jwt,
                key=key,
                algorithms=algorithms,
                options=options,
            )["payload"]
        )

    def verify_and_decode(
        self,
        jwt: str,
        key: str,
        algorithms: list[str],
        issuer: str | None = None,
        leeway: int | float | timedelta = 0,
        options: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Verify a JWT's signature and claims."""
        merged_options = {**_VERIFY_OPTIONS, **(options or {})}
        payload = self.decode_payload(
            jwt=jwt,
            key=key,
            options=merged_options,
            algorithms=algorithms,
        )
        # These should never be missing since we verify them
        # but this is an additional safeguard to make sure
        # nothing slips through.
        assert "exp" in payload, "exp claim is required"
        assert "iat" in payload, "iat claim is required"
        self._validate_claims( # type: ignore[no-untyped-call]
            payload=payload,
            options=merged_options,
            issuer=issuer,
            leeway=leeway,
        )
        return payload


_jwt = _PyJWTWithVerify() # type: ignore[no-untyped-call]
verify_and_decode = _jwt.verify_and_decode
unverified_hs256_token_decode = lru_cache(maxsize=JWT_TOKEN_CACHE_SIZE)(
    partial(
        _jwt.decode_payload, key="", algorithms=["HS256"], options=_NO_VERIFY_OPTIONS
    )
)

__all__ = [
    "unverified_hs256_token_decode",
    "verify_and_decode",
]
@@ -6,12 +6,15 @@ from typing import TYPE_CHECKING
|
||||
import attr
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.helpers import (
|
||||
device_registry as dev_reg,
|
||||
entity_registry as ent_reg,
|
||||
)
|
||||
|
||||
|
||||
@attr.s(slots=True)
|
||||
class PermissionLookup:
|
||||
"""Class to hold data for permission lookups."""
|
||||
|
||||
entity_registry: er.EntityRegistry = attr.ib()
|
||||
device_registry: dr.DeviceRegistry = attr.ib()
|
||||
entity_registry: ent_reg.EntityRegistry = attr.ib()
|
||||
device_registry: dev_reg.DeviceRegistry = attr.ib()
|
||||
|
||||
@@ -8,7 +8,7 @@ from .util.async_ import protect_loop
|
||||
def enable() -> None:
|
||||
"""Enable the detection of blocking calls in the event loop."""
|
||||
# Prevent urllib3 and requests doing I/O in event loop
|
||||
HTTPConnection.putrequest = protect_loop( # type: ignore[method-assign]
|
||||
HTTPConnection.putrequest = protect_loop( # type: ignore[assignment]
|
||||
HTTPConnection.putrequest
|
||||
)
|
||||
|
||||
|
||||
@@ -31,7 +31,6 @@ from .helpers import (
|
||||
entity_registry,
|
||||
issue_registry,
|
||||
recorder,
|
||||
template,
|
||||
)
|
||||
from .helpers.dispatcher import async_dispatcher_send
|
||||
from .helpers.typing import ConfigType
|
||||
@@ -239,14 +238,12 @@ async def load_registries(hass: core.HomeAssistant) -> None:
|
||||
|
||||
# Load the registries and cache the result of platform.uname().processor
|
||||
entity.async_setup(hass)
|
||||
template.async_setup(hass)
|
||||
await asyncio.gather(
|
||||
area_registry.async_load(hass),
|
||||
device_registry.async_load(hass),
|
||||
entity_registry.async_load(hass),
|
||||
issue_registry.async_load(hass),
|
||||
hass.async_add_executor_job(_cache_uname_processor),
|
||||
template.async_load_custom_templates(hass),
|
||||
)
|
||||
|
||||
|
||||
@@ -511,20 +508,19 @@ async def async_setup_multi_components(
|
||||
) -> None:
|
||||
"""Set up multiple domains. Log on failure."""
|
||||
futures = {
|
||||
domain: hass.async_create_task(
|
||||
async_setup_component(hass, domain, config), f"setup component {domain}"
|
||||
)
|
||||
domain: hass.async_create_task(async_setup_component(hass, domain, config))
|
||||
for domain in domains
|
||||
}
|
||||
results = await asyncio.gather(*futures.values(), return_exceptions=True)
|
||||
for idx, domain in enumerate(futures):
|
||||
result = results[idx]
|
||||
if isinstance(result, BaseException):
|
||||
_LOGGER.error(
|
||||
"Error setting up integration %s - received exception",
|
||||
domain,
|
||||
exc_info=(type(result), result, result.__traceback__),
|
||||
)
|
||||
await asyncio.wait(futures.values())
|
||||
errors = [domain for domain in domains if futures[domain].exception()]
|
||||
for domain in errors:
|
||||
exception = futures[domain].exception()
|
||||
assert exception is not None
|
||||
_LOGGER.error(
|
||||
"Error setting up integration %s - received exception",
|
||||
domain,
|
||||
exc_info=(type(exception), exception, exception.__traceback__),
|
||||
)
|
||||
|
||||
|
||||
async def _async_set_up_integrations(
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
{
|
||||
"domain": "heltun",
|
||||
"name": "HELTUN",
|
||||
"iot_standards": ["zwave"]
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
{
|
||||
"domain": "homeseer",
|
||||
"name": "HomeSeer",
|
||||
"iot_standards": ["zwave"]
|
||||
}
|
||||
@@ -10,6 +10,7 @@
|
||||
"microsoft_face",
|
||||
"microsoft",
|
||||
"msteams",
|
||||
"xbox"
|
||||
"xbox",
|
||||
"xbox_live"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""Contains components that can be plugged into Home Assistant.
|
||||
"""This package contains components that can be plugged into Home Assistant.
|
||||
|
||||
Component design guidelines:
|
||||
- Each component defines a constant DOMAIN that is equal to its filename.
|
||||
|
||||
@@ -17,6 +17,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType
|
||||
from homeassistant.helpers.entity import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
from .const import ATTR_FORECAST, CONF_FORECAST, DOMAIN, MANUFACTURER
|
||||
|
||||
@@ -115,7 +116,11 @@ class AccuWeatherDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
async with timeout(10):
|
||||
current = await self.accuweather.async_get_current_conditions()
|
||||
forecast = (
|
||||
await self.accuweather.async_get_forecast() if self.forecast else {}
|
||||
await self.accuweather.async_get_forecast(
|
||||
metric=self.hass.config.units is METRIC_SYSTEM
|
||||
)
|
||||
if self.forecast
|
||||
else {}
|
||||
)
|
||||
except (
|
||||
ApiError,
|
||||
|
||||
@@ -20,6 +20,7 @@ from homeassistant.components.weather import (
|
||||
ATTR_CONDITION_WINDY,
|
||||
)
|
||||
|
||||
API_IMPERIAL: Final = "Imperial"
|
||||
API_METRIC: Final = "Metric"
|
||||
ATTRIBUTION: Final = "Data provided by AccuWeather"
|
||||
ATTR_CATEGORY: Final = "Category"
|
||||
|
||||
@@ -26,9 +26,11 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
from . import AccuWeatherDataUpdateCoordinator
|
||||
from .const import (
|
||||
API_IMPERIAL,
|
||||
API_METRIC,
|
||||
ATTR_CATEGORY,
|
||||
ATTR_DIRECTION,
|
||||
@@ -49,7 +51,7 @@ PARALLEL_UPDATES = 1
|
||||
class AccuWeatherSensorDescriptionMixin:
|
||||
"""Mixin for AccuWeather sensor."""
|
||||
|
||||
value_fn: Callable[[dict[str, Any]], StateType]
|
||||
value_fn: Callable[[dict[str, Any], str], StateType]
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -59,6 +61,8 @@ class AccuWeatherSensorDescription(
|
||||
"""Class describing AccuWeather sensor entities."""
|
||||
|
||||
attr_fn: Callable[[dict[str, Any]], dict[str, StateType]] = lambda _: {}
|
||||
metric_unit: str | None = None
|
||||
us_customary_unit: str | None = None
|
||||
|
||||
|
||||
FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
@@ -68,7 +72,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="Cloud cover day",
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda data: cast(int, data),
|
||||
value_fn=lambda data, _: cast(int, data),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="CloudCoverNight",
|
||||
@@ -76,7 +80,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="Cloud cover night",
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda data: cast(int, data),
|
||||
value_fn=lambda data, _: cast(int, data),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Grass",
|
||||
@@ -84,7 +88,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="Grass pollen",
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
|
||||
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
|
||||
value_fn=lambda data, _: cast(int, data[ATTR_VALUE]),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
@@ -92,7 +96,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
icon="mdi:weather-partly-cloudy",
|
||||
name="Hours of sun",
|
||||
native_unit_of_measurement=UnitOfTime.HOURS,
|
||||
value_fn=lambda data: cast(float, data),
|
||||
value_fn=lambda data, _: cast(float, data),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Mold",
|
||||
@@ -100,7 +104,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="Mold pollen",
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
|
||||
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
|
||||
value_fn=lambda data, _: cast(int, data[ATTR_VALUE]),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
@@ -108,7 +112,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
icon="mdi:vector-triangle",
|
||||
name="Ozone",
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
|
||||
value_fn=lambda data, _: cast(int, data[ATTR_VALUE]),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
@@ -117,52 +121,56 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="Ragweed pollen",
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
|
||||
value_fn=lambda data, _: cast(int, data[ATTR_VALUE]),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="RealFeelTemperatureMax",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
name="RealFeel temperature max",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="RealFeelTemperatureMin",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
name="RealFeel temperature min",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="RealFeelTemperatureShadeMax",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
name="RealFeel temperature shade max",
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="RealFeelTemperatureShadeMin",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
name="RealFeel temperature shade min",
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="ThunderstormProbabilityDay",
|
||||
icon="mdi:weather-lightning",
|
||||
name="Thunderstorm probability day",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda data: cast(int, data),
|
||||
value_fn=lambda data, _: cast(int, data),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="ThunderstormProbabilityNight",
|
||||
icon="mdi:weather-lightning",
|
||||
name="Thunderstorm probability night",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda data: cast(int, data),
|
||||
value_fn=lambda data, _: cast(int, data),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Tree",
|
||||
@@ -170,7 +178,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="Tree pollen",
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
|
||||
value_fn=lambda data, _: cast(int, data[ATTR_VALUE]),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
@@ -178,7 +186,7 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
icon="mdi:weather-sunny",
|
||||
name="UV index",
|
||||
native_unit_of_measurement=UV_INDEX,
|
||||
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
|
||||
value_fn=lambda data, _: cast(int, data[ATTR_VALUE]),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
@@ -186,8 +194,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
device_class=SensorDeviceClass.WIND_SPEED,
|
||||
name="Wind gust day",
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
metric_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
@@ -195,24 +204,27 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
device_class=SensorDeviceClass.WIND_SPEED,
|
||||
name="Wind gust night",
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
metric_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="WindDay",
|
||||
device_class=SensorDeviceClass.WIND_SPEED,
|
||||
name="Wind day",
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
metric_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="WindNight",
|
||||
device_class=SensorDeviceClass.WIND_SPEED,
|
||||
name="Wind night",
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
metric_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
value_fn=lambda data, _: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
|
||||
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
|
||||
),
|
||||
)
|
||||
@@ -224,8 +236,9 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="Apparent temperature",
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Ceiling",
|
||||
@@ -233,8 +246,9 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
icon="mdi:weather-fog",
|
||||
name="Cloud ceiling",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfLength.METERS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
metric_unit=UnitOfLength.METERS,
|
||||
us_customary_unit=UnitOfLength.FEET,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
suggested_display_precision=0,
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
@@ -244,7 +258,7 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda data: cast(int, data),
|
||||
value_fn=lambda data, _: cast(int, data),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="DewPoint",
|
||||
@@ -252,16 +266,18 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="Dew point",
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="RealFeelTemperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
name="RealFeel temperature",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="RealFeelTemperatureShade",
|
||||
@@ -269,16 +285,18 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="RealFeel temperature shade",
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Precipitation",
|
||||
device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
|
||||
name="Precipitation",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
metric_unit=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfVolumetricFlux.INCHES_PER_HOUR,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
attr_fn=lambda data: {"type": data["PrecipitationType"]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
@@ -288,7 +306,7 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="Pressure tendency",
|
||||
options=["falling", "rising", "steady"],
|
||||
translation_key="pressure_tendency",
|
||||
value_fn=lambda data: cast(str, data["LocalizedText"]).lower(),
|
||||
value_fn=lambda data, _: cast(str, data["LocalizedText"]).lower(),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="UVIndex",
|
||||
@@ -296,7 +314,7 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="UV index",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UV_INDEX,
|
||||
value_fn=lambda data: cast(int, data),
|
||||
value_fn=lambda data, _: cast(int, data),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data["UVIndexText"]},
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
@@ -305,8 +323,9 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="Wet bulb temperature",
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="WindChillTemperature",
|
||||
@@ -314,16 +333,18 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="Wind chill temperature",
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
metric_unit=UnitOfTemperature.CELSIUS,
|
||||
us_customary_unit=UnitOfTemperature.FAHRENHEIT,
|
||||
value_fn=lambda data, unit: cast(float, data[unit][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Wind",
|
||||
device_class=SensorDeviceClass.WIND_SPEED,
|
||||
name="Wind",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[ATTR_SPEED][API_METRIC][ATTR_VALUE]),
|
||||
metric_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
value_fn=lambda data, unit: cast(float, data[ATTR_SPEED][unit][ATTR_VALUE]),
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="WindGust",
|
||||
@@ -331,8 +352,9 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
||||
name="Wind gust",
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[ATTR_SPEED][API_METRIC][ATTR_VALUE]),
|
||||
metric_unit=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
us_customary_unit=UnitOfSpeed.MILES_PER_HOUR,
|
||||
value_fn=lambda data, unit: cast(float, data[ATTR_SPEED][unit][ATTR_VALUE]),
|
||||
),
|
||||
)
|
||||
|
||||
@@ -352,7 +374,7 @@ async def async_setup_entry(
|
||||
# Some air quality/allergy sensors are only available for certain
|
||||
# locations.
|
||||
sensors.extend(
|
||||
AccuWeatherSensor(coordinator, description, forecast_day=day)
|
||||
AccuWeatherForecastSensor(coordinator, description, forecast_day=day)
|
||||
for day in range(MAX_FORECAST_DAYS + 1)
|
||||
for description in FORECAST_SENSOR_TYPES
|
||||
if description.key in coordinator.data[ATTR_FORECAST][0]
|
||||
@@ -391,27 +413,34 @@ class AccuWeatherSensor(
|
||||
self._attr_unique_id = (
|
||||
f"{coordinator.location_key}-{description.key}".lower()
|
||||
)
|
||||
self._attr_native_unit_of_measurement = description.native_unit_of_measurement
|
||||
if self.coordinator.hass.config.units is METRIC_SYSTEM:
|
||||
self._unit_system = API_METRIC
|
||||
if metric_unit := description.metric_unit:
|
||||
self._attr_native_unit_of_measurement = metric_unit
|
||||
else:
|
||||
self._unit_system = API_IMPERIAL
|
||||
if us_customary_unit := description.us_customary_unit:
|
||||
self._attr_native_unit_of_measurement = us_customary_unit
|
||||
self._attr_device_info = coordinator.device_info
|
||||
self.forecast_day = forecast_day
|
||||
if forecast_day is not None:
|
||||
self.forecast_day = forecast_day
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state."""
|
||||
return self.entity_description.value_fn(self._sensor_data)
|
||||
return self.entity_description.value_fn(self._sensor_data, self._unit_system)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes."""
|
||||
if self.forecast_day is not None:
|
||||
return self.entity_description.attr_fn(self._sensor_data)
|
||||
|
||||
return self.entity_description.attr_fn(self.coordinator.data)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle data update."""
|
||||
self._sensor_data = _get_sensor_data(
|
||||
self.coordinator.data, self.entity_description.key, self.forecast_day
|
||||
self.coordinator.data, self.entity_description.key
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -429,3 +458,20 @@ def _get_sensor_data(
|
||||
return sensors["PrecipitationSummary"]["PastHour"]
|
||||
|
||||
return sensors[kind]
|
||||
|
||||
|
||||
class AccuWeatherForecastSensor(AccuWeatherSensor):
|
||||
"""Define an AccuWeather forecast entity."""
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes."""
|
||||
return self.entity_description.attr_fn(self._sensor_data)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle data update."""
|
||||
self._sensor_data = _get_sensor_data(
|
||||
self.coordinator.data, self.entity_description.key, self.forecast_day
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -28,9 +28,17 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util.dt import utc_from_timestamp
|
||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
from . import AccuWeatherDataUpdateCoordinator
|
||||
from .const import API_METRIC, ATTR_FORECAST, ATTRIBUTION, CONDITION_CLASSES, DOMAIN
|
||||
from .const import (
|
||||
API_IMPERIAL,
|
||||
API_METRIC,
|
||||
ATTR_FORECAST,
|
||||
ATTRIBUTION,
|
||||
CONDITION_CLASSES,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
@@ -58,11 +66,20 @@ class AccuWeatherEntity(
|
||||
# Coordinator data is used also for sensors which don't have units automatically
|
||||
# converted, hence the weather entity's native units follow the configured unit
|
||||
# system
|
||||
self._attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
|
||||
self._attr_native_pressure_unit = UnitOfPressure.HPA
|
||||
self._attr_native_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
self._attr_native_visibility_unit = UnitOfLength.KILOMETERS
|
||||
self._attr_native_wind_speed_unit = UnitOfSpeed.KILOMETERS_PER_HOUR
|
||||
if coordinator.hass.config.units is METRIC_SYSTEM:
|
||||
self._attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
|
||||
self._attr_native_pressure_unit = UnitOfPressure.HPA
|
||||
self._attr_native_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
self._attr_native_visibility_unit = UnitOfLength.KILOMETERS
|
||||
self._attr_native_wind_speed_unit = UnitOfSpeed.KILOMETERS_PER_HOUR
|
||||
self._unit_system = API_METRIC
|
||||
else:
|
||||
self._unit_system = API_IMPERIAL
|
||||
self._attr_native_precipitation_unit = UnitOfPrecipitationDepth.INCHES
|
||||
self._attr_native_pressure_unit = UnitOfPressure.INHG
|
||||
self._attr_native_temperature_unit = UnitOfTemperature.FAHRENHEIT
|
||||
self._attr_native_visibility_unit = UnitOfLength.MILES
|
||||
self._attr_native_wind_speed_unit = UnitOfSpeed.MILES_PER_HOUR
|
||||
self._attr_unique_id = coordinator.location_key
|
||||
self._attr_attribution = ATTRIBUTION
|
||||
self._attr_device_info = coordinator.device_info
|
||||
@@ -82,12 +99,16 @@ class AccuWeatherEntity(
|
||||
@property
|
||||
def native_temperature(self) -> float:
|
||||
"""Return the temperature."""
|
||||
return cast(float, self.coordinator.data["Temperature"][API_METRIC]["Value"])
|
||||
return cast(
|
||||
float, self.coordinator.data["Temperature"][self._unit_system]["Value"]
|
||||
)
|
||||
|
||||
@property
|
||||
def native_pressure(self) -> float:
|
||||
"""Return the pressure."""
|
||||
return cast(float, self.coordinator.data["Pressure"][API_METRIC]["Value"])
|
||||
return cast(
|
||||
float, self.coordinator.data["Pressure"][self._unit_system]["Value"]
|
||||
)
|
||||
|
||||
@property
|
||||
def humidity(self) -> int:
|
||||
@@ -97,7 +118,9 @@ class AccuWeatherEntity(
|
||||
@property
|
||||
def native_wind_speed(self) -> float:
|
||||
"""Return the wind speed."""
|
||||
return cast(float, self.coordinator.data["Wind"]["Speed"][API_METRIC]["Value"])
|
||||
return cast(
|
||||
float, self.coordinator.data["Wind"]["Speed"][self._unit_system]["Value"]
|
||||
)
|
||||
|
||||
@property
|
||||
def wind_bearing(self) -> int:
|
||||
@@ -107,7 +130,9 @@ class AccuWeatherEntity(
|
||||
@property
|
||||
def native_visibility(self) -> float:
|
||||
"""Return the visibility."""
|
||||
return cast(float, self.coordinator.data["Visibility"][API_METRIC]["Value"])
|
||||
return cast(
|
||||
float, self.coordinator.data["Visibility"][self._unit_system]["Value"]
|
||||
)
|
||||
|
||||
@property
|
||||
def ozone(self) -> int | None:
|
||||
|
||||
@@ -40,7 +40,7 @@ def get_scanner(
|
||||
|
||||
|
||||
class ActiontecDeviceScanner(DeviceScanner):
|
||||
"""Class which queries an actiontec router for connected devices."""
|
||||
"""This class queries an actiontec router for connected devices."""
|
||||
|
||||
def __init__(self, config: ConfigType) -> None:
|
||||
"""Initialize the scanner."""
|
||||
|
||||
@@ -5,8 +5,6 @@ import logging
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
ATTR_TARGET_TEMP_HIGH,
|
||||
ATTR_TARGET_TEMP_LOW,
|
||||
FAN_AUTO,
|
||||
FAN_HIGH,
|
||||
FAN_LOW,
|
||||
@@ -34,10 +32,18 @@ ADVANTAGE_AIR_HVAC_MODES = {
|
||||
"cool": HVACMode.COOL,
|
||||
"vent": HVACMode.FAN_ONLY,
|
||||
"dry": HVACMode.DRY,
|
||||
"myauto": HVACMode.HEAT_COOL,
|
||||
"myauto": HVACMode.AUTO,
|
||||
}
|
||||
HASS_HVAC_MODES = {v: k for k, v in ADVANTAGE_AIR_HVAC_MODES.items()}
|
||||
|
||||
AC_HVAC_MODES = [
|
||||
HVACMode.OFF,
|
||||
HVACMode.COOL,
|
||||
HVACMode.HEAT,
|
||||
HVACMode.FAN_ONLY,
|
||||
HVACMode.DRY,
|
||||
]
|
||||
|
||||
ADVANTAGE_AIR_FAN_MODES = {
|
||||
"autoAA": FAN_AUTO,
|
||||
"low": FAN_LOW,
|
||||
@@ -47,14 +53,7 @@ ADVANTAGE_AIR_FAN_MODES = {
|
||||
HASS_FAN_MODES = {v: k for k, v in ADVANTAGE_AIR_FAN_MODES.items()}
|
||||
FAN_SPEEDS = {FAN_LOW: 30, FAN_MEDIUM: 60, FAN_HIGH: 100}
|
||||
|
||||
ADVANTAGE_AIR_AUTOFAN = "aaAutoFanModeEnabled"
|
||||
ADVANTAGE_AIR_MYZONE = "MyZone"
|
||||
ADVANTAGE_AIR_MYAUTO = "MyAuto"
|
||||
ADVANTAGE_AIR_MYAUTO_ENABLED = "myAutoModeEnabled"
|
||||
ADVANTAGE_AIR_MYTEMP = "MyTemp"
|
||||
ADVANTAGE_AIR_MYTEMP_ENABLED = "climateControlModeEnabled"
|
||||
ADVANTAGE_AIR_HEAT_TARGET = "myAutoHeatTargetTemp"
|
||||
ADVANTAGE_AIR_COOL_TARGET = "myAutoCoolTargetTemp"
|
||||
ZONE_HVAC_MODES = [HVACMode.OFF, HVACMode.HEAT_COOL]
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
@@ -76,7 +75,7 @@ async def async_setup_entry(
|
||||
entities.append(AdvantageAirAC(instance, ac_key))
|
||||
for zone_key, zone in ac_device["zones"].items():
|
||||
# Only add zone climate control when zone is in temperature control
|
||||
if zone["type"] > 0:
|
||||
if zone["type"] != 0:
|
||||
entities.append(AdvantageAirZone(instance, ac_key, zone_key))
|
||||
async_add_entities(entities)
|
||||
|
||||
@@ -84,56 +83,24 @@ async def async_setup_entry(
|
||||
class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
|
||||
"""AdvantageAir AC unit."""
|
||||
|
||||
_attr_fan_modes = [FAN_LOW, FAN_MEDIUM, FAN_HIGH]
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_target_temperature_step = PRECISION_WHOLE
|
||||
_attr_max_temp = 32
|
||||
_attr_min_temp = 16
|
||||
_attr_fan_modes = [FAN_AUTO, FAN_LOW, FAN_MEDIUM, FAN_HIGH]
|
||||
_attr_hvac_modes = AC_HVAC_MODES
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.FAN_MODE
|
||||
)
|
||||
|
||||
def __init__(self, instance: dict[str, Any], ac_key: str) -> None:
|
||||
"""Initialize an AdvantageAir AC unit."""
|
||||
super().__init__(instance, ac_key)
|
||||
|
||||
# Set supported features and HVAC modes based on current operating mode
|
||||
if self._ac.get(ADVANTAGE_AIR_MYAUTO_ENABLED):
|
||||
# MyAuto
|
||||
self._attr_supported_features = (
|
||||
ClimateEntityFeature.FAN_MODE
|
||||
| ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
|
||||
)
|
||||
self._attr_hvac_modes = [
|
||||
HVACMode.OFF,
|
||||
HVACMode.COOL,
|
||||
HVACMode.HEAT,
|
||||
HVACMode.FAN_ONLY,
|
||||
HVACMode.DRY,
|
||||
HVACMode.HEAT_COOL,
|
||||
]
|
||||
elif self._ac.get(ADVANTAGE_AIR_MYTEMP_ENABLED):
|
||||
# MyTemp
|
||||
self._attr_supported_features = ClimateEntityFeature.FAN_MODE
|
||||
self._attr_hvac_modes = [HVACMode.OFF, HVACMode.COOL, HVACMode.HEAT]
|
||||
|
||||
else:
|
||||
# MyZone
|
||||
self._attr_supported_features = (
|
||||
ClimateEntityFeature.FAN_MODE | ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
)
|
||||
self._attr_hvac_modes = [
|
||||
HVACMode.OFF,
|
||||
HVACMode.COOL,
|
||||
HVACMode.HEAT,
|
||||
HVACMode.FAN_ONLY,
|
||||
HVACMode.DRY,
|
||||
]
|
||||
|
||||
# Add "ezfan" mode if supported
|
||||
if self._ac.get(ADVANTAGE_AIR_AUTOFAN):
|
||||
self._attr_fan_modes += [FAN_AUTO]
|
||||
if self._ac.get("myAutoModeEnabled"):
|
||||
self._attr_hvac_modes = AC_HVAC_MODES + [HVACMode.AUTO]
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
def target_temperature(self) -> float:
|
||||
"""Return the current target temperature."""
|
||||
return self._ac["setTemp"]
|
||||
|
||||
@@ -149,40 +116,6 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
|
||||
"""Return the current fan modes."""
|
||||
return ADVANTAGE_AIR_FAN_MODES.get(self._ac["fan"])
|
||||
|
||||
@property
|
||||
def target_temperature_high(self) -> float | None:
|
||||
"""Return the temperature cool mode is enabled."""
|
||||
return self._ac.get(ADVANTAGE_AIR_COOL_TARGET)
|
||||
|
||||
@property
|
||||
def target_temperature_low(self) -> float | None:
|
||||
"""Return the temperature heat mode is enabled."""
|
||||
return self._ac.get(ADVANTAGE_AIR_HEAT_TARGET)
|
||||
|
||||
async def async_turn_on(self) -> None:
|
||||
"""Set the HVAC State to on."""
|
||||
await self.aircon(
|
||||
{
|
||||
self.ac_key: {
|
||||
"info": {
|
||||
"state": ADVANTAGE_AIR_STATE_ON,
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
async def async_turn_off(self) -> None:
|
||||
"""Set the HVAC State to off."""
|
||||
await self.aircon(
|
||||
{
|
||||
self.ac_key: {
|
||||
"info": {
|
||||
"state": ADVANTAGE_AIR_STATE_OFF,
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set the HVAC Mode and State."""
|
||||
if hvac_mode == HVACMode.OFF:
|
||||
@@ -209,37 +142,27 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set the Temperature."""
|
||||
if ATTR_TEMPERATURE in kwargs:
|
||||
await self.aircon(
|
||||
{self.ac_key: {"info": {"setTemp": kwargs[ATTR_TEMPERATURE]}}}
|
||||
)
|
||||
if ATTR_TARGET_TEMP_LOW in kwargs and ATTR_TARGET_TEMP_HIGH in kwargs:
|
||||
await self.aircon(
|
||||
{
|
||||
self.ac_key: {
|
||||
"info": {
|
||||
ADVANTAGE_AIR_COOL_TARGET: kwargs[ATTR_TARGET_TEMP_HIGH],
|
||||
ADVANTAGE_AIR_HEAT_TARGET: kwargs[ATTR_TARGET_TEMP_LOW],
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
temp = kwargs.get(ATTR_TEMPERATURE)
|
||||
await self.aircon({self.ac_key: {"info": {"setTemp": temp}}})
|
||||
|
||||
|
||||
class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
|
||||
"""AdvantageAir MyTemp Zone control."""
|
||||
"""AdvantageAir Zone control."""
|
||||
|
||||
_attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT_COOL]
|
||||
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_target_temperature_step = PRECISION_WHOLE
|
||||
_attr_max_temp = 32
|
||||
_attr_min_temp = 16
|
||||
_attr_hvac_modes = ZONE_HVAC_MODES
|
||||
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
|
||||
def __init__(self, instance: dict[str, Any], ac_key: str, zone_key: str) -> None:
|
||||
"""Initialize an AdvantageAir Zone control."""
|
||||
super().__init__(instance, ac_key, zone_key)
|
||||
self._attr_name = self._zone["name"]
|
||||
self._attr_unique_id = (
|
||||
f'{self.coordinator.data["system"]["rid"]}-{ac_key}-{zone_key}'
|
||||
)
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
@@ -249,7 +172,7 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
|
||||
return HVACMode.OFF
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
def current_temperature(self) -> float:
|
||||
"""Return the current temperature."""
|
||||
return self._zone["measuredTemp"]
|
||||
|
||||
@@ -258,32 +181,24 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
|
||||
"""Return the target temperature."""
|
||||
return self._zone["setTemp"]
|
||||
|
||||
async def async_turn_on(self) -> None:
|
||||
"""Set the HVAC State to on."""
|
||||
await self.aircon(
|
||||
{
|
||||
self.ac_key: {
|
||||
"zones": {self.zone_key: {"state": ADVANTAGE_AIR_STATE_OPEN}}
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
async def async_turn_off(self) -> None:
|
||||
"""Set the HVAC State to off."""
|
||||
await self.aircon(
|
||||
{
|
||||
self.ac_key: {
|
||||
"zones": {self.zone_key: {"state": ADVANTAGE_AIR_STATE_CLOSE}}
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set the HVAC Mode and State."""
|
||||
if hvac_mode == HVACMode.OFF:
|
||||
await self.async_turn_off()
|
||||
await self.aircon(
|
||||
{
|
||||
self.ac_key: {
|
||||
"zones": {self.zone_key: {"state": ADVANTAGE_AIR_STATE_CLOSE}}
|
||||
}
|
||||
}
|
||||
)
|
||||
else:
|
||||
await self.async_turn_on()
|
||||
await self.aircon(
|
||||
{
|
||||
self.ac_key: {
|
||||
"zones": {self.zone_key: {"state": ADVANTAGE_AIR_STATE_OPEN}}
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set the Temperature."""
|
||||
|
||||
@@ -68,7 +68,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
||||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_CAQI,
|
||||
icon="mdi:air-filter",
|
||||
translation_key="caqi",
|
||||
name=ATTR_API_CAQI,
|
||||
native_unit_of_measurement="CAQI",
|
||||
suggested_display_precision=0,
|
||||
attrs=lambda data: {
|
||||
@@ -80,7 +80,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
||||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_PM1,
|
||||
device_class=SensorDeviceClass.PM1,
|
||||
translation_key="pm1",
|
||||
name="PM1.0",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
@@ -88,7 +88,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
||||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_PM25,
|
||||
device_class=SensorDeviceClass.PM25,
|
||||
translation_key="pm25",
|
||||
name="PM2.5",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
@@ -100,7 +100,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
||||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_PM10,
|
||||
device_class=SensorDeviceClass.PM10,
|
||||
translation_key="pm10",
|
||||
name=ATTR_API_PM10,
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
@@ -112,7 +112,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
||||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_HUMIDITY,
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
translation_key="humidity",
|
||||
name=ATTR_API_HUMIDITY.capitalize(),
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
@@ -120,7 +120,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
||||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_PRESSURE,
|
||||
device_class=SensorDeviceClass.PRESSURE,
|
||||
translation_key="pressure",
|
||||
name=ATTR_API_PRESSURE.capitalize(),
|
||||
native_unit_of_measurement=UnitOfPressure.HPA,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
@@ -128,14 +128,14 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
||||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_TEMPERATURE,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
translation_key="temperature",
|
||||
name=ATTR_API_TEMPERATURE.capitalize(),
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_CO,
|
||||
translation_key="co",
|
||||
name="Carbon monoxide",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
@@ -147,7 +147,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
||||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_NO2,
|
||||
device_class=SensorDeviceClass.NITROGEN_DIOXIDE,
|
||||
translation_key="no2",
|
||||
name="Nitrogen dioxide",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
@@ -159,7 +159,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
||||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_SO2,
|
||||
device_class=SensorDeviceClass.SULPHUR_DIOXIDE,
|
||||
translation_key="so2",
|
||||
name="Sulphur dioxide",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
@@ -171,7 +171,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
||||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_O3,
|
||||
device_class=SensorDeviceClass.OZONE,
|
||||
translation_key="o3",
|
||||
name="Ozone",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
|
||||
@@ -26,42 +26,5 @@
|
||||
"requests_remaining": "Remaining allowed requests",
|
||||
"requests_per_day": "Allowed requests per day"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"caqi": {
|
||||
"name": "Common air quality index"
|
||||
},
|
||||
"pm1": {
|
||||
"name": "[%key:component::sensor::entity_component::pm1::name%]"
|
||||
},
|
||||
"pm25": {
|
||||
"name": "[%key:component::sensor::entity_component::pm25::name%]"
|
||||
},
|
||||
"pm10": {
|
||||
"name": "[%key:component::sensor::entity_component::pm10::name%]"
|
||||
},
|
||||
"humidity": {
|
||||
"name": "[%key:component::sensor::entity_component::humidity::name%]"
|
||||
},
|
||||
"pressure": {
|
||||
"name": "[%key:component::sensor::entity_component::pressure::name%]"
|
||||
},
|
||||
"temperature": {
|
||||
"name": "[%key:component::sensor::entity_component::temperature::name%]"
|
||||
},
|
||||
"co": {
|
||||
"name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]"
|
||||
},
|
||||
"no2": {
|
||||
"name": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]"
|
||||
},
|
||||
"so2": {
|
||||
"name": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
|
||||
},
|
||||
"o3": {
|
||||
"name": "[%key:component::sensor::entity_component::ozone::name%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,16 +1,58 @@
|
||||
"""The air-Q integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AirQCoordinator
|
||||
from aioairq import AirQ
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.entity import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER, TARGET_ROUTE, UPDATE_INTERVAL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
|
||||
class AirQCoordinator(DataUpdateCoordinator):
|
||||
"""Coordinator is responsible for querying the device at a specified route."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
) -> None:
|
||||
"""Initialise a custom coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name=DOMAIN,
|
||||
update_interval=timedelta(seconds=UPDATE_INTERVAL),
|
||||
)
|
||||
session = async_get_clientsession(hass)
|
||||
self.airq = AirQ(
|
||||
entry.data[CONF_IP_ADDRESS], entry.data[CONF_PASSWORD], session
|
||||
)
|
||||
self.device_id = entry.unique_id
|
||||
assert self.device_id is not None
|
||||
self.device_info = DeviceInfo(
|
||||
manufacturer=MANUFACTURER,
|
||||
identifiers={(DOMAIN, self.device_id)},
|
||||
)
|
||||
self.device_info.update(entry.data["device_info"])
|
||||
|
||||
async def _async_update_data(self) -> dict:
|
||||
"""Fetch the data from the device."""
|
||||
data = await self.airq.get(TARGET_ROUTE)
|
||||
return self.airq.drop_uncertainties_from_data(data)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up air-Q from a config entry."""
|
||||
|
||||
|
||||
@@ -74,11 +74,12 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
device_info = await airq.fetch_device_info()
|
||||
await self.async_set_unique_id(device_info["id"])
|
||||
await self.async_set_unique_id(device_info.pop("id"))
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=device_info["name"], data=user_input
|
||||
title=device_info["name"],
|
||||
data=user_input | {"device_info": device_info},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
"""The air-Q integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from aioairq import AirQ
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.entity import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .const import DOMAIN, MANUFACTURER, TARGET_ROUTE, UPDATE_INTERVAL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AirQCoordinator(DataUpdateCoordinator):
|
||||
"""Coordinator is responsible for querying the device at a specified route."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
) -> None:
|
||||
"""Initialise a custom coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name=DOMAIN,
|
||||
update_interval=timedelta(seconds=UPDATE_INTERVAL),
|
||||
)
|
||||
session = async_get_clientsession(hass)
|
||||
self.airq = AirQ(
|
||||
entry.data[CONF_IP_ADDRESS], entry.data[CONF_PASSWORD], session
|
||||
)
|
||||
self.device_id = entry.unique_id
|
||||
assert self.device_id is not None
|
||||
self.device_info = DeviceInfo(
|
||||
manufacturer=MANUFACTURER,
|
||||
identifiers={(DOMAIN, self.device_id)},
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> dict:
|
||||
"""Fetch the data from the device."""
|
||||
if "name" not in self.device_info:
|
||||
info = await self.airq.fetch_device_info()
|
||||
self.device_info.update(
|
||||
DeviceInfo(
|
||||
name=info["name"],
|
||||
model=info["model"],
|
||||
sw_version=info["sw_version"],
|
||||
hw_version=info["hw_version"],
|
||||
)
|
||||
)
|
||||
|
||||
data = await self.airq.get(TARGET_ROUTE)
|
||||
return self.airq.drop_uncertainties_from_data(data)
|
||||
@@ -51,13 +51,6 @@ class AirQEntityDescription(SensorEntityDescription, AirQEntityDescriptionMixin)
|
||||
|
||||
# Keys must match those in the data dictionary
|
||||
SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
AirQEntityDescription(
|
||||
key="c2h4o",
|
||||
name="Acetaldehyde",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("c2h4o"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="nh3_MR100",
|
||||
name="Ammonia",
|
||||
@@ -65,27 +58,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("nh3_MR100"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="ash3",
|
||||
name="Arsine",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("ash3"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="br2",
|
||||
name="Bromine",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("br2"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="ch4s",
|
||||
name="CH4S",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("ch4s"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="cl2_M20",
|
||||
name="Chlorine",
|
||||
@@ -93,16 +65,10 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("cl2_M20"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="clo2",
|
||||
name="ClO2",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("clo2"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="co",
|
||||
name="CO",
|
||||
device_class=SensorDeviceClass.CO,
|
||||
native_unit_of_measurement=CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("co"),
|
||||
@@ -115,13 +81,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("co2"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="cs2",
|
||||
name="CS2",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("cs2"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="dewpt",
|
||||
name="Dew point",
|
||||
@@ -137,13 +96,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("ethanol"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="c2h4",
|
||||
name="Ethylene",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("c2h4"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="ch2o_M10",
|
||||
name="Formaldehyde",
|
||||
@@ -151,13 +103,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("ch2o_M10"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="f2",
|
||||
name="Fluorine",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("f2"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="h2s",
|
||||
name="H2S",
|
||||
@@ -165,27 +110,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("h2s"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="hcl",
|
||||
name="HCl",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("hcl"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="hcn",
|
||||
name="HCN",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("hcn"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="hf",
|
||||
name="HF",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("hf"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="health",
|
||||
name="Health Index",
|
||||
@@ -217,13 +141,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("h2_M1000"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="h2o2",
|
||||
name="Hydrogen peroxide",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("h2o2"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="ch4_MIPEX",
|
||||
name="Methane",
|
||||
@@ -256,11 +173,12 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
value=lambda data: data.get("no2"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="acid_M100",
|
||||
name="Organic acid",
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
|
||||
key="o3",
|
||||
name="Ozone",
|
||||
device_class=SensorDeviceClass.OZONE,
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("acid_M100"),
|
||||
value=lambda data: data.get("o3"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="oxygen",
|
||||
@@ -270,14 +188,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
value=lambda data: data.get("oxygen"),
|
||||
icon="mdi:leaf",
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="o3",
|
||||
name="Ozone",
|
||||
device_class=SensorDeviceClass.OZONE,
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("o3"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="performance",
|
||||
name="Performance Index",
|
||||
@@ -286,13 +196,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
icon="mdi:head-check",
|
||||
value=lambda data: data.get("performance", 0.0) / 10.0,
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="ph3",
|
||||
name="PH3",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("ph3"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="pm1",
|
||||
name="PM1",
|
||||
@@ -343,20 +246,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("c3h8_MIPEX"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="refigerant",
|
||||
name="Refrigerant",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("refigerant"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="sih4",
|
||||
name="SiH4",
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("sih4"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="so2",
|
||||
name="SO2",
|
||||
@@ -400,6 +289,7 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
AirQEntityDescription(
|
||||
key="tvoc",
|
||||
name="VOC",
|
||||
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("tvoc"),
|
||||
@@ -407,18 +297,11 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
|
||||
AirQEntityDescription(
|
||||
key="tvoc_ionsc",
|
||||
name="VOC (Industrial)",
|
||||
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
|
||||
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value=lambda data: data.get("tvoc_ionsc"),
|
||||
),
|
||||
AirQEntityDescription(
|
||||
key="virus",
|
||||
name="Virus Index",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
icon="mdi:virus-off",
|
||||
value=lambda data: data.get("virus", 0.0),
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -380,6 +380,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
else:
|
||||
entry.version = version
|
||||
hass.config_entries.async_update_entry(entry)
|
||||
|
||||
LOGGER.info("Migration to version %s successful", version)
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
|
||||
from AIOAladdinConnect import AladdinConnectClient, session_manager
|
||||
from AIOAladdinConnect import AladdinConnectClient
|
||||
|
||||
from homeassistant.components.cover import CoverDeviceClass, CoverEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -46,7 +46,7 @@ class AladdinDevice(CoverEntity):
|
||||
) -> None:
|
||||
"""Initialize the Aladdin Connect cover."""
|
||||
self._acc = acc
|
||||
self._entry_id = entry.entry_id
|
||||
|
||||
self._device_id = device["device_id"]
|
||||
self._number = device["door_number"]
|
||||
self._name = device["name"]
|
||||
@@ -85,18 +85,7 @@ class AladdinDevice(CoverEntity):
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update status of cover."""
|
||||
try:
|
||||
await self._acc.get_doors(self._serial)
|
||||
self._attr_available = True
|
||||
|
||||
except session_manager.ConnectionError:
|
||||
self._attr_available = False
|
||||
|
||||
except session_manager.InvalidPasswordError:
|
||||
self._attr_available = False
|
||||
await self.hass.async_create_task(
|
||||
self.hass.config_entries.async_reload(self._entry_id)
|
||||
)
|
||||
await self._acc.get_doors(self._serial)
|
||||
|
||||
@property
|
||||
def is_closed(self) -> bool | None:
|
||||
|
||||
@@ -21,7 +21,7 @@ from homeassistant.const import (
|
||||
SERVICE_ALARM_TRIGGER,
|
||||
)
|
||||
from homeassistant.core import Context, HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers import entity_registry
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import get_supported_features
|
||||
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
|
||||
@@ -57,11 +57,11 @@ async def async_get_actions(
|
||||
hass: HomeAssistant, device_id: str
|
||||
) -> list[dict[str, str]]:
|
||||
"""List device actions for Alarm control panel devices."""
|
||||
registry = er.async_get(hass)
|
||||
registry = entity_registry.async_get(hass)
|
||||
actions = []
|
||||
|
||||
# Get all the integrations entities for this device
|
||||
for entry in er.async_entries_for_device(registry, device_id):
|
||||
for entry in entity_registry.async_entries_for_device(registry, device_id):
|
||||
if entry.domain != DOMAIN:
|
||||
continue
|
||||
|
||||
|
||||
@@ -21,11 +21,7 @@ from homeassistant.const import (
|
||||
STATE_ALARM_TRIGGERED,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import (
|
||||
condition,
|
||||
config_validation as cv,
|
||||
entity_registry as er,
|
||||
)
|
||||
from homeassistant.helpers import condition, config_validation as cv, entity_registry
|
||||
from homeassistant.helpers.config_validation import DEVICE_CONDITION_BASE_SCHEMA
|
||||
from homeassistant.helpers.entity import get_supported_features
|
||||
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
|
||||
@@ -68,11 +64,11 @@ async def async_get_conditions(
|
||||
hass: HomeAssistant, device_id: str
|
||||
) -> list[dict[str, str]]:
|
||||
"""List device conditions for Alarm control panel devices."""
|
||||
registry = er.async_get(hass)
|
||||
registry = entity_registry.async_get(hass)
|
||||
conditions = []
|
||||
|
||||
# Get all the integrations entities for this device
|
||||
for entry in er.async_entries_for_device(registry, device_id):
|
||||
for entry in entity_registry.async_entries_for_device(registry, device_id):
|
||||
if entry.domain != DOMAIN:
|
||||
continue
|
||||
|
||||
|
||||
@@ -23,7 +23,7 @@ from homeassistant.const import (
|
||||
STATE_ALARM_TRIGGERED,
|
||||
)
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry
|
||||
from homeassistant.helpers.entity import get_supported_features
|
||||
from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
@@ -57,11 +57,11 @@ async def async_get_triggers(
|
||||
hass: HomeAssistant, device_id: str
|
||||
) -> list[dict[str, str]]:
|
||||
"""List device triggers for Alarm control panel devices."""
|
||||
registry = er.async_get(hass)
|
||||
registry = entity_registry.async_get(hass)
|
||||
triggers: list[dict[str, str]] = []
|
||||
|
||||
# Get all the integrations entities for this device
|
||||
for entry in er.async_entries_for_device(registry, device_id):
|
||||
for entry in entity_registry.async_entries_for_device(registry, device_id):
|
||||
if entry.domain != DOMAIN:
|
||||
continue
|
||||
|
||||
|
||||
@@ -26,41 +26,19 @@
|
||||
"armed_vacation": "{entity_name} armed vacation"
|
||||
}
|
||||
},
|
||||
"entity_component": {
|
||||
"state": {
|
||||
"_": {
|
||||
"name": "[%key:component::alarm_control_panel::title%]",
|
||||
"state": {
|
||||
"armed": "Armed",
|
||||
"disarmed": "Disarmed",
|
||||
"armed_home": "Armed home",
|
||||
"armed_away": "Armed away",
|
||||
"armed_night": "Armed night",
|
||||
"armed_vacation": "Armed vacation",
|
||||
"armed_custom_bypass": "Armed custom bypass",
|
||||
"pending": "Pending",
|
||||
"arming": "Arming",
|
||||
"disarming": "Disarming",
|
||||
"triggered": "Triggered"
|
||||
},
|
||||
"state_attributes": {
|
||||
"code_format": {
|
||||
"name": "Code format",
|
||||
"state": {
|
||||
"text": "Text",
|
||||
"number": "Number"
|
||||
}
|
||||
},
|
||||
"changed_by": {
|
||||
"name": "Changed by"
|
||||
},
|
||||
"code_arm_required": {
|
||||
"name": "Code for arming",
|
||||
"state": {
|
||||
"true": "Required",
|
||||
"false": "Not required"
|
||||
}
|
||||
}
|
||||
}
|
||||
"armed": "Armed",
|
||||
"disarmed": "Disarmed",
|
||||
"armed_home": "Armed home",
|
||||
"armed_away": "Armed away",
|
||||
"armed_night": "Armed night",
|
||||
"armed_vacation": "Armed vacation",
|
||||
"armed_custom_bypass": "Armed custom bypass",
|
||||
"pending": "Pending",
|
||||
"arming": "Arming",
|
||||
"disarming": "Disarming",
|
||||
"triggered": "Triggered"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,10 @@
|
||||
{
|
||||
"title": "Alert",
|
||||
"entity_component": {
|
||||
"state": {
|
||||
"_": {
|
||||
"name": "[%key:component::alert::title%]",
|
||||
"state": {
|
||||
"idle": "[%key:common::state::idle%]",
|
||||
"off": "Acknowledged",
|
||||
"on": "[%key:common::state::active%]"
|
||||
}
|
||||
"idle": "[%key:common::state::idle%]",
|
||||
"off": "Acknowledged",
|
||||
"on": "[%key:common::state::active%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ from .const import API_TEMP_UNITS
|
||||
|
||||
|
||||
class UnsupportedProperty(HomeAssistantError):
|
||||
"""Does not support the requested Smart Home API property."""
|
||||
"""This entity does not support the requested Smart Home API property."""
|
||||
|
||||
|
||||
class NoTokenAvailable(HomeAssistantError):
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Support for Alexa skill service end point."""
|
||||
import copy
|
||||
import hmac
|
||||
from http import HTTPStatus
|
||||
import logging
|
||||
@@ -47,7 +48,7 @@ class AlexaFlashBriefingView(http.HomeAssistantView):
|
||||
def __init__(self, hass, flash_briefings):
|
||||
"""Initialize Alexa view."""
|
||||
super().__init__()
|
||||
self.flash_briefings = flash_briefings
|
||||
self.flash_briefings = copy.deepcopy(flash_briefings)
|
||||
template.attach(hass, self.flash_briefings)
|
||||
|
||||
@callback
|
||||
|
||||
@@ -34,49 +34,49 @@ CONF_TEXT_TYPE: Final = "text_type"
|
||||
|
||||
SUPPORTED_VOICES: Final[list[str]] = [
|
||||
"Aditi", # Hindi
|
||||
"Amy", # English (British)
|
||||
"Aria", # English (New Zealand), Neural
|
||||
"Amy",
|
||||
"Aria",
|
||||
"Arlet", # Catalan, Neural
|
||||
"Arthur", # English, Neural
|
||||
"Astrid", # Swedish
|
||||
"Ayanda", # English (South African), Neural
|
||||
"Ayanda",
|
||||
"Bianca", # Italian
|
||||
"Brian", # English (British)
|
||||
"Brian",
|
||||
"Camila", # Portuguese, Brazilian
|
||||
"Carla", # Italian
|
||||
"Carla",
|
||||
"Carmen", # Romanian
|
||||
"Celine", # French
|
||||
"Celine",
|
||||
"Chantal", # French Canadian
|
||||
"Conchita", # Spanish (European)
|
||||
"Cristiano", # Portuguese (European)
|
||||
"Conchita",
|
||||
"Cristiano",
|
||||
"Daniel", # German, Neural
|
||||
"Dora", # Icelandic
|
||||
"Elin", # Swedish, Neural
|
||||
"Emma", # English
|
||||
"Enrique", # Spanish (European)
|
||||
"Ewa", # Polish
|
||||
"Enrique",
|
||||
"Ewa",
|
||||
"Filiz", # Turkish
|
||||
"Gabrielle", # French (Canadian)
|
||||
"Gabrielle",
|
||||
"Geraint", # English Welsh
|
||||
"Giorgio", # Italian
|
||||
"Giorgio",
|
||||
"Gwyneth", # Welsh
|
||||
"Hala", # Arabic (Gulf), Neural
|
||||
"Hannah", # German (Austrian), Neural
|
||||
"Hans", # German
|
||||
"Hans",
|
||||
"Hiujin", # Chinese (Cantonese), Neural
|
||||
"Ida", # Norwegian, Neural
|
||||
"Ines", # Portuguese, European
|
||||
"Ivy", # English
|
||||
"Jacek", # Polish
|
||||
"Jan", # Polish
|
||||
"Joanna", # English
|
||||
"Joey", # English
|
||||
"Justin", # English
|
||||
"Ivy",
|
||||
"Jacek",
|
||||
"Jan",
|
||||
"Joanna",
|
||||
"Joey",
|
||||
"Justin",
|
||||
"Kajal", # English (Indian)/Hindi (Bilingual ), Neural
|
||||
"Karl", # Icelandic
|
||||
"Kendra", # English
|
||||
"Kevin", # English, Neural
|
||||
"Kimberly", # English
|
||||
"Karl",
|
||||
"Kendra",
|
||||
"Kevin",
|
||||
"Kimberly",
|
||||
"Laura", # Dutch, Neural
|
||||
"Lea", # French
|
||||
"Liam", # Canadian French, Neural
|
||||
@@ -84,12 +84,12 @@ SUPPORTED_VOICES: Final[list[str]] = [
|
||||
"Lotte", # Dutch
|
||||
"Lucia", # Spanish European
|
||||
"Lupe", # Spanish US
|
||||
"Mads", # Danish
|
||||
"Mads",
|
||||
"Maja", # Polish
|
||||
"Marlene", # German
|
||||
"Mathieu", # French
|
||||
"Matthew", # English
|
||||
"Maxim", # Russian
|
||||
"Marlene",
|
||||
"Mathieu",
|
||||
"Matthew",
|
||||
"Maxim",
|
||||
"Mia", # Spanish Mexican
|
||||
"Miguel", # Spanish US
|
||||
"Mizuki", # Japanese
|
||||
@@ -100,19 +100,17 @@ SUPPORTED_VOICES: Final[list[str]] = [
|
||||
"Penelope", # Spanish US
|
||||
"Pedro", # Spanish US, Neural
|
||||
"Raveena", # English, Indian
|
||||
"Ricardo", # Portuguese (Brazilian)
|
||||
"Ruben", # Dutch
|
||||
"Russell", # English (Australian)
|
||||
"Ruth", # English, Neural
|
||||
"Ricardo",
|
||||
"Ruben",
|
||||
"Russell",
|
||||
"Salli", # English
|
||||
"Seoyeon", # Korean
|
||||
"Stephen", # English, Neural
|
||||
"Suvi", # Finnish
|
||||
"Takumi", # Japanese
|
||||
"Takumi",
|
||||
"Tatyana", # Russian
|
||||
"Vicki", # German
|
||||
"Vitoria", # Portuguese, Brazilian
|
||||
"Zeina", # Arabic
|
||||
"Zeina",
|
||||
"Zhiyu", # Chinese
|
||||
]
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any, Final
|
||||
from typing import Final
|
||||
|
||||
import boto3
|
||||
import botocore
|
||||
@@ -166,8 +166,8 @@ class AmazonPollyProvider(Provider):
|
||||
def get_tts_audio(
|
||||
self,
|
||||
message: str,
|
||||
language: str,
|
||||
options: dict[str, Any] | None = None,
|
||||
language: str | None = None,
|
||||
options: dict[str, str] | None = None,
|
||||
) -> TtsAudioType:
|
||||
"""Request TTS file from Polly."""
|
||||
if options is None or language is None:
|
||||
|
||||
@@ -117,6 +117,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
en_reg.async_clear_config_entry(entry.entry_id)
|
||||
|
||||
version = entry.version = 2
|
||||
hass.config_entries.async_update_entry(entry)
|
||||
|
||||
LOGGER.info("Migration to version %s successful", version)
|
||||
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aioambient"],
|
||||
"requirements": ["aioambient==2023.04.0"]
|
||||
"requirements": ["aioambient==2021.11.0"]
|
||||
}
|
||||
|
||||
@@ -20,12 +20,13 @@ from homeassistant.components.camera import (
|
||||
from homeassistant.components.ffmpeg import FFmpegManager, get_ffmpeg_manager
|
||||
from homeassistant.const import ATTR_ENTITY_ID, CONF_NAME, STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers import entity_registry
|
||||
from homeassistant.helpers.aiohttp_client import (
|
||||
async_aiohttp_proxy_stream,
|
||||
async_aiohttp_proxy_web,
|
||||
async_get_clientsession,
|
||||
)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
@@ -145,7 +146,7 @@ async def async_setup_platform(
|
||||
# with this version, update the old entity with the new unique id.
|
||||
serial_number = await device.api.async_serial_number
|
||||
serial_number = serial_number.strip()
|
||||
registry = er.async_get(hass)
|
||||
registry = entity_registry.async_get(hass)
|
||||
entity_id = registry.async_get_entity_id(CAMERA_DOMAIN, DOMAIN, serial_number)
|
||||
if entity_id is not None:
|
||||
_LOGGER.debug("Updating unique id for camera %s", entity_id)
|
||||
|
||||
@@ -19,7 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
from .const import CONF_STATION_ID, SCAN_INTERVAL
|
||||
from .const import ATTRIBUTION, CONF_STATION_ID, SCAN_INTERVAL
|
||||
|
||||
_LOGGER: Final = logging.getLogger(__name__)
|
||||
|
||||
@@ -54,8 +54,6 @@ async def async_setup_platform(
|
||||
class AmpioSmogQuality(AirQualityEntity):
|
||||
"""Implementation of an Ampio Smog air quality entity."""
|
||||
|
||||
_attr_attribution = "Data provided by Ampio"
|
||||
|
||||
def __init__(
|
||||
self, api: AmpioSmogMapData, station_id: str, name: str | None
|
||||
) -> None:
|
||||
@@ -84,6 +82,11 @@ class AmpioSmogQuality(AirQualityEntity):
|
||||
"""Return the particulate matter 10 level."""
|
||||
return self._ampio.api.pm10 # type: ignore[no-any-return]
|
||||
|
||||
@property
|
||||
def attribution(self) -> str:
|
||||
"""Return the attribution."""
|
||||
return ATTRIBUTION
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Get the latest data from the AmpioMap API."""
|
||||
await self._ampio.async_update()
|
||||
|
||||
@@ -2,5 +2,6 @@
|
||||
from datetime import timedelta
|
||||
from typing import Final
|
||||
|
||||
ATTRIBUTION: Final = "Data provided by Ampio"
|
||||
CONF_STATION_ID: Final = "station_id"
|
||||
SCAN_INTERVAL: Final = timedelta(minutes=10)
|
||||
|
||||
@@ -27,9 +27,7 @@ async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
|
||||
async_call_later(hass, 900, analytics.send_analytics)
|
||||
|
||||
# Send every day
|
||||
async_track_time_interval(
|
||||
hass, analytics.send_analytics, INTERVAL, name="analytics daily"
|
||||
)
|
||||
async_track_time_interval(hass, analytics.send_analytics, INTERVAL)
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""Support for functionality to interact with Android/Fire TV devices."""
|
||||
"""Support for functionality to interact with Android TV/Fire TV devices."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
@@ -135,11 +135,11 @@ async def async_connect_androidtv(
|
||||
if not aftv.available:
|
||||
# Determine the name that will be used for the device in the log
|
||||
if config[CONF_DEVICE_CLASS] == DEVICE_ANDROIDTV:
|
||||
device_name = "Android device"
|
||||
device_name = "Android TV device"
|
||||
elif config[CONF_DEVICE_CLASS] == DEVICE_FIRETV:
|
||||
device_name = "Fire TV device"
|
||||
else:
|
||||
device_name = "Android / Fire TV device"
|
||||
device_name = "Android TV / Fire TV device"
|
||||
|
||||
error_message = f"Could not connect to {device_name} at {address} {adb_log}"
|
||||
return None, error_message
|
||||
@@ -148,7 +148,7 @@ async def async_connect_androidtv(
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Android Debug Bridge platform."""
|
||||
"""Set up Android TV platform."""
|
||||
|
||||
state_det_rules = entry.options.get(CONF_STATE_DETECTION_RULES)
|
||||
if CONF_ADB_SERVER_IP not in entry.data:
|
||||
@@ -167,7 +167,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
raise ConfigEntryNotReady(error_message)
|
||||
|
||||
async def async_close_connection(event):
|
||||
"""Close Android Debug Bridge connection on HA Stop."""
|
||||
"""Close Android TV connection on HA Stop."""
|
||||
await aftv.adb_close()
|
||||
|
||||
entry.async_on_unload(
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""Config flow to configure the Android Debug Bridge integration."""
|
||||
"""Config flow to configure the Android TV integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
@@ -114,14 +114,13 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
async def _async_check_connection(
|
||||
self, user_input: dict[str, Any]
|
||||
) -> tuple[str | None, str | None]:
|
||||
"""Attempt to connect the Android device."""
|
||||
"""Attempt to connect the Android TV."""
|
||||
|
||||
try:
|
||||
aftv, error_message = await async_connect_androidtv(self.hass, user_input)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
_LOGGER.exception(
|
||||
"Unknown error connecting with Android device at %s",
|
||||
user_input[CONF_HOST],
|
||||
"Unknown error connecting with Android TV at %s", user_input[CONF_HOST]
|
||||
)
|
||||
return RESULT_UNKNOWN, None
|
||||
|
||||
@@ -131,7 +130,7 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
dev_prop = aftv.device_properties
|
||||
_LOGGER.info(
|
||||
"Android device at %s: %s = %r, %s = %r",
|
||||
"Android TV at %s: %s = %r, %s = %r",
|
||||
user_input[CONF_HOST],
|
||||
PROP_ETHMAC,
|
||||
dev_prop.get(PROP_ETHMAC),
|
||||
@@ -185,7 +184,7 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
|
||||
class OptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
"""Handle an option flow for Android Debug Bridge."""
|
||||
"""Handle an option flow for Android TV."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""Android Debug Bridge component constants."""
|
||||
"""Android TV component constants."""
|
||||
DOMAIN = "androidtv"
|
||||
|
||||
ANDROID_DEV = DOMAIN
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"domain": "androidtv",
|
||||
"name": "Android Debug Bridge",
|
||||
"name": "Android TV",
|
||||
"codeowners": ["@JeffLIrion", "@ollo69"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/androidtv",
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""Support for functionality to interact with Android / Fire TV devices."""
|
||||
"""Support for functionality to interact with Android TV / Fire TV devices."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable, Coroutine
|
||||
@@ -87,7 +87,7 @@ async def async_setup_entry(
|
||||
entry: ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Android Debug Bridge entity."""
|
||||
"""Set up the Android TV entity."""
|
||||
aftv = hass.data[DOMAIN][entry.entry_id][ANDROID_DEV]
|
||||
device_class = aftv.DEVICE_CLASS
|
||||
device_type = (
|
||||
@@ -201,7 +201,7 @@ def adb_decorator(
|
||||
|
||||
|
||||
class ADBDevice(MediaPlayerEntity):
|
||||
"""Representation of an Android or Fire TV device."""
|
||||
"""Representation of an Android TV or Fire TV device."""
|
||||
|
||||
_attr_device_class = MediaPlayerDeviceClass.TV
|
||||
|
||||
@@ -214,7 +214,7 @@ class ADBDevice(MediaPlayerEntity):
|
||||
entry_id,
|
||||
entry_data,
|
||||
):
|
||||
"""Initialize the Android / Fire TV device."""
|
||||
"""Initialize the Android TV / Fire TV device."""
|
||||
self.aftv = aftv
|
||||
self._attr_name = name
|
||||
self._attr_unique_id = unique_id
|
||||
@@ -384,7 +384,7 @@ class ADBDevice(MediaPlayerEntity):
|
||||
|
||||
@adb_decorator()
|
||||
async def adb_command(self, command):
|
||||
"""Send an ADB command to an Android / Fire TV device."""
|
||||
"""Send an ADB command to an Android TV / Fire TV device."""
|
||||
if key := KEYS.get(command):
|
||||
await self.aftv.adb_shell(f"input keyevent {key}")
|
||||
return
|
||||
@@ -422,13 +422,13 @@ class ADBDevice(MediaPlayerEntity):
|
||||
persistent_notification.async_create(
|
||||
self.hass,
|
||||
msg,
|
||||
title="Android Debug Bridge",
|
||||
title="Android TV",
|
||||
)
|
||||
_LOGGER.info("%s", msg)
|
||||
|
||||
@adb_decorator()
|
||||
async def service_download(self, device_path, local_path):
|
||||
"""Download a file from your Android / Fire TV device to your Home Assistant instance."""
|
||||
"""Download a file from your Android TV / Fire TV device to your Home Assistant instance."""
|
||||
if not self.hass.config.is_allowed_path(local_path):
|
||||
_LOGGER.warning("'%s' is not secure to load data from!", local_path)
|
||||
return
|
||||
@@ -437,7 +437,7 @@ class ADBDevice(MediaPlayerEntity):
|
||||
|
||||
@adb_decorator()
|
||||
async def service_upload(self, device_path, local_path):
|
||||
"""Upload a file from your Home Assistant instance to an Android / Fire TV device."""
|
||||
"""Upload a file from your Home Assistant instance to an Android TV / Fire TV device."""
|
||||
if not self.hass.config.is_allowed_path(local_path):
|
||||
_LOGGER.warning("'%s' is not secure to load data from!", local_path)
|
||||
return
|
||||
@@ -446,7 +446,7 @@ class ADBDevice(MediaPlayerEntity):
|
||||
|
||||
|
||||
class AndroidTVDevice(ADBDevice):
|
||||
"""Representation of an Android device."""
|
||||
"""Representation of an Android TV device."""
|
||||
|
||||
_attr_supported_features = (
|
||||
MediaPlayerEntityFeature.PAUSE
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# Describes the format for available Android and Fire TV services
|
||||
# Describes the format for available Android TV and Fire TV services
|
||||
|
||||
adb_command:
|
||||
name: ADB command
|
||||
description: Send an ADB command to an Android / Fire TV device.
|
||||
description: Send an ADB command to an Android TV / Fire TV device.
|
||||
target:
|
||||
entity:
|
||||
integration: androidtv
|
||||
@@ -17,7 +17,7 @@ adb_command:
|
||||
text:
|
||||
download:
|
||||
name: Download
|
||||
description: Download a file from your Android / Fire TV device to your Home Assistant instance.
|
||||
description: Download a file from your Android TV / Fire TV device to your Home Assistant instance.
|
||||
target:
|
||||
entity:
|
||||
integration: androidtv
|
||||
@@ -25,7 +25,7 @@ download:
|
||||
fields:
|
||||
device_path:
|
||||
name: Device path
|
||||
description: The filepath on the Android / Fire TV device.
|
||||
description: The filepath on the Android TV / Fire TV device.
|
||||
required: true
|
||||
example: "/storage/emulated/0/Download/example.txt"
|
||||
selector:
|
||||
@@ -39,7 +39,7 @@ download:
|
||||
text:
|
||||
upload:
|
||||
name: Upload
|
||||
description: Upload a file from your Home Assistant instance to an Android / Fire TV device.
|
||||
description: Upload a file from your Home Assistant instance to an Android TV / Fire TV device.
|
||||
target:
|
||||
entity:
|
||||
integration: androidtv
|
||||
@@ -47,7 +47,7 @@ upload:
|
||||
fields:
|
||||
device_path:
|
||||
name: Device path
|
||||
description: The filepath on the Android / Fire TV device.
|
||||
description: The filepath on the Android TV / Fire TV device.
|
||||
required: true
|
||||
example: "/storage/emulated/0/Download/example.txt"
|
||||
selector:
|
||||
|
||||
@@ -38,7 +38,7 @@
|
||||
}
|
||||
},
|
||||
"apps": {
|
||||
"title": "Configure Android Apps",
|
||||
"title": "Configure Android TV Apps",
|
||||
"description": "Configure application id {app_id}",
|
||||
"data": {
|
||||
"app_name": "Application Name",
|
||||
@@ -47,7 +47,7 @@
|
||||
}
|
||||
},
|
||||
"rules": {
|
||||
"title": "Configure Android state detection rules",
|
||||
"title": "Configure Android TV state detection rules",
|
||||
"description": "Configure detection rule for application id {rule_id}",
|
||||
"data": {
|
||||
"rule_id": "Application ID",
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
"""The Android TV Remote integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from androidtvremote2 import (
|
||||
AndroidTVRemote,
|
||||
CannotConnect,
|
||||
ConnectionClosed,
|
||||
InvalidAuth,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
|
||||
from .const import DOMAIN
|
||||
from .helpers import create_api
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.REMOTE]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Android TV Remote from a config entry."""
|
||||
|
||||
api = create_api(hass, entry.data[CONF_HOST])
|
||||
try:
|
||||
await api.async_connect()
|
||||
except InvalidAuth as exc:
|
||||
# The Android TV is hard reset or the certificate and key files were deleted.
|
||||
raise ConfigEntryAuthFailed from exc
|
||||
except (CannotConnect, ConnectionClosed) as exc:
|
||||
# The Android TV is network unreachable. Raise exception and let Home Assistant retry
|
||||
# later. If device gets a new IP address the zeroconf flow will update the config.
|
||||
raise ConfigEntryNotReady from exc
|
||||
|
||||
def reauth_needed() -> None:
|
||||
"""Start a reauth flow if Android TV is hard reset while reconnecting."""
|
||||
entry.async_start_reauth(hass)
|
||||
|
||||
# Start a task (canceled in disconnect) to keep reconnecting if device becomes
|
||||
# network unreachable. If device gets a new IP address the zeroconf flow will
|
||||
# update the config entry data and reload the config entry.
|
||||
api.keep_reconnecting(reauth_needed)
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = api
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
@callback
|
||||
def on_hass_stop(event) -> None:
|
||||
"""Stop push updates when hass stops."""
|
||||
api.disconnect()
|
||||
|
||||
entry.async_on_unload(
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
|
||||
api: AndroidTVRemote = hass.data[DOMAIN].pop(entry.entry_id)
|
||||
api.disconnect()
|
||||
|
||||
return unload_ok
|
||||
@@ -1,187 +0,0 @@
|
||||
"""Config flow for Android TV Remote integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
from androidtvremote2 import (
|
||||
AndroidTVRemote,
|
||||
CannotConnect,
|
||||
ConnectionClosed,
|
||||
InvalidAuth,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components import zeroconf
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
|
||||
from .const import DOMAIN
|
||||
from .helpers import create_api
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("host"): str,
|
||||
}
|
||||
)
|
||||
|
||||
STEP_PAIR_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("pin"): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class AndroidTVRemoteConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Android TV Remote."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize a new AndroidTVRemoteConfigFlow."""
|
||||
self.api: AndroidTVRemote | None = None
|
||||
self.reauth_entry: config_entries.ConfigEntry | None = None
|
||||
self.host: str | None = None
|
||||
self.name: str | None = None
|
||||
self.mac: str | None = None
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> FlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self.host = user_input["host"]
|
||||
assert self.host
|
||||
api = create_api(self.hass, self.host)
|
||||
try:
|
||||
self.name, self.mac = await api.async_get_name_and_mac()
|
||||
assert self.mac
|
||||
await self.async_set_unique_id(format_mac(self.mac))
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: self.host})
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
# Likely invalid IP address or device is network unreachable. Stay
|
||||
# in the user step allowing the user to enter a different host.
|
||||
errors["base"] = "cannot_connect"
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def _async_start_pair(self) -> FlowResult:
|
||||
"""Start pairing with the Android TV. Navigate to the pair flow to enter the PIN shown on screen."""
|
||||
assert self.host
|
||||
self.api = create_api(self.hass, self.host)
|
||||
await self.api.async_generate_cert_if_missing()
|
||||
await self.api.async_start_pairing()
|
||||
return await self.async_step_pair()
|
||||
|
||||
async def async_step_pair(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> FlowResult:
|
||||
"""Handle the pair step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
try:
|
||||
pin = user_input["pin"]
|
||||
assert self.api
|
||||
await self.api.async_finish_pairing(pin)
|
||||
if self.reauth_entry:
|
||||
await self.hass.config_entries.async_reload(
|
||||
self.reauth_entry.entry_id
|
||||
)
|
||||
return self.async_abort(reason="reauth_successful")
|
||||
assert self.name
|
||||
return self.async_create_entry(
|
||||
title=self.name,
|
||||
data={
|
||||
CONF_HOST: self.host,
|
||||
CONF_NAME: self.name,
|
||||
CONF_MAC: self.mac,
|
||||
},
|
||||
)
|
||||
except InvalidAuth:
|
||||
# Invalid PIN. Stay in the pair step allowing the user to enter
|
||||
# a different PIN.
|
||||
errors["base"] = "invalid_auth"
|
||||
except ConnectionClosed:
|
||||
# Either user canceled pairing on the Android TV itself (most common)
|
||||
# or device doesn't respond to the specified host (device was unplugged,
|
||||
# network was unplugged, or device got a new IP address).
|
||||
# Attempt to pair again.
|
||||
try:
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
# Device doesn't respond to the specified host. Abort.
|
||||
# If we are in the user flow we could go back to the user step to allow
|
||||
# them to enter a new IP address but we cannot do that for the zeroconf
|
||||
# flow. Simpler to abort for both flows.
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
return self.async_show_form(
|
||||
step_id="pair",
|
||||
data_schema=STEP_PAIR_DATA_SCHEMA,
|
||||
description_placeholders={CONF_NAME: self.name},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: zeroconf.ZeroconfServiceInfo
|
||||
) -> FlowResult:
|
||||
"""Handle zeroconf discovery."""
|
||||
self.host = discovery_info.host
|
||||
self.name = discovery_info.name.removesuffix("._androidtvremote2._tcp.local.")
|
||||
self.mac = discovery_info.properties.get("bt")
|
||||
assert self.mac
|
||||
await self.async_set_unique_id(format_mac(self.mac))
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={CONF_HOST: self.host, CONF_NAME: self.name}
|
||||
)
|
||||
self.context.update({"title_placeholders": {CONF_NAME: self.name}})
|
||||
return await self.async_step_zeroconf_confirm()
|
||||
|
||||
async def async_step_zeroconf_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> FlowResult:
|
||||
"""Handle a flow initiated by zeroconf."""
|
||||
if user_input is not None:
|
||||
try:
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
# Device became network unreachable after discovery.
|
||||
# Abort and let discovery find it again later.
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
return self.async_show_form(
|
||||
step_id="zeroconf_confirm",
|
||||
description_placeholders={CONF_NAME: self.name},
|
||||
)
|
||||
|
||||
async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult:
|
||||
"""Handle configuration by re-auth."""
|
||||
self.host = entry_data[CONF_HOST]
|
||||
self.name = entry_data[CONF_NAME]
|
||||
self.mac = entry_data[CONF_MAC]
|
||||
self.reauth_entry = self.hass.config_entries.async_get_entry(
|
||||
self.context["entry_id"]
|
||||
)
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> FlowResult:
|
||||
"""Dialog that informs the user that reauth is required."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
try:
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
# Device is network unreachable. Abort.
|
||||
errors["base"] = "cannot_connect"
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
description_placeholders={CONF_NAME: self.name},
|
||||
errors=errors,
|
||||
)
|
||||
@@ -1,6 +0,0 @@
|
||||
"""Constants for the Android TV Remote integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Final
|
||||
|
||||
DOMAIN: Final = "androidtv_remote"
|
||||
@@ -1,29 +0,0 @@
|
||||
"""Diagnostics support for Android TV Remote."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from androidtvremote2 import AndroidTVRemote
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
TO_REDACT = {CONF_HOST, CONF_MAC}
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: ConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
api: AndroidTVRemote = hass.data[DOMAIN].pop(entry.entry_id)
|
||||
return async_redact_data(
|
||||
{
|
||||
"api_device_info": api.device_info,
|
||||
"config_entry_data": entry.data,
|
||||
},
|
||||
TO_REDACT,
|
||||
)
|
||||
@@ -1,18 +0,0 @@
|
||||
"""Helper functions for Android TV Remote integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
from androidtvremote2 import AndroidTVRemote
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.storage import STORAGE_DIR
|
||||
|
||||
|
||||
def create_api(hass: HomeAssistant, host: str) -> AndroidTVRemote:
|
||||
"""Create an AndroidTVRemote instance."""
|
||||
return AndroidTVRemote(
|
||||
client_name="Home Assistant",
|
||||
certfile=hass.config.path(STORAGE_DIR, "androidtv_remote_cert.pem"),
|
||||
keyfile=hass.config.path(STORAGE_DIR, "androidtv_remote_key.pem"),
|
||||
host=host,
|
||||
loop=hass.loop,
|
||||
)
|
||||
@@ -1,13 +0,0 @@
|
||||
{
|
||||
"domain": "androidtv_remote",
|
||||
"name": "Android TV Remote",
|
||||
"codeowners": ["@tronikos"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/androidtv_remote",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["androidtvremote2"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["androidtvremote2==0.0.7"],
|
||||
"zeroconf": ["_androidtvremote2._tcp.local."]
|
||||
}
|
||||
@@ -1,154 +0,0 @@
|
||||
"""Remote control support for Android TV Remote."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Iterable
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from androidtvremote2 import AndroidTVRemote, ConnectionClosed
|
||||
|
||||
from homeassistant.components.remote import (
|
||||
ATTR_ACTIVITY,
|
||||
ATTR_DELAY_SECS,
|
||||
ATTR_HOLD_SECS,
|
||||
ATTR_NUM_REPEATS,
|
||||
DEFAULT_DELAY_SECS,
|
||||
DEFAULT_HOLD_SECS,
|
||||
DEFAULT_NUM_REPEATS,
|
||||
RemoteEntity,
|
||||
RemoteEntityFeature,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
|
||||
from homeassistant.helpers.entity import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Android TV remote entity based on a config entry."""
|
||||
api: AndroidTVRemote = hass.data[DOMAIN][config_entry.entry_id]
|
||||
async_add_entities([AndroidTVRemoteEntity(api, config_entry)])
|
||||
|
||||
|
||||
class AndroidTVRemoteEntity(RemoteEntity):
|
||||
"""Representation of an Android TV Remote."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(self, api: AndroidTVRemote, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize device."""
|
||||
self._api = api
|
||||
self._host = config_entry.data[CONF_HOST]
|
||||
self._name = config_entry.data[CONF_NAME]
|
||||
self._attr_unique_id = config_entry.unique_id
|
||||
self._attr_supported_features = RemoteEntityFeature.ACTIVITY
|
||||
self._attr_is_on = api.is_on
|
||||
self._attr_current_activity = api.current_app
|
||||
device_info = api.device_info
|
||||
assert config_entry.unique_id
|
||||
assert device_info
|
||||
self._attr_device_info = DeviceInfo(
|
||||
connections={(CONNECTION_NETWORK_MAC, config_entry.data[CONF_MAC])},
|
||||
identifiers={(DOMAIN, config_entry.unique_id)},
|
||||
name=self._name,
|
||||
manufacturer=device_info["manufacturer"],
|
||||
model=device_info["model"],
|
||||
)
|
||||
|
||||
@callback
|
||||
def is_on_updated(is_on: bool) -> None:
|
||||
self._attr_is_on = is_on
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def current_app_updated(current_app: str) -> None:
|
||||
self._attr_current_activity = current_app
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def is_available_updated(is_available: bool) -> None:
|
||||
if is_available:
|
||||
_LOGGER.info(
|
||||
"Reconnected to %s at %s",
|
||||
self._name,
|
||||
self._host,
|
||||
)
|
||||
else:
|
||||
_LOGGER.warning(
|
||||
"Disconnected from %s at %s",
|
||||
self._name,
|
||||
self._host,
|
||||
)
|
||||
self._attr_available = is_available
|
||||
self.async_write_ha_state()
|
||||
|
||||
api.add_is_on_updated_callback(is_on_updated)
|
||||
api.add_current_app_updated_callback(current_app_updated)
|
||||
api.add_is_available_updated_callback(is_available_updated)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the Android TV on."""
|
||||
if not self.is_on:
|
||||
self._send_key_command("POWER")
|
||||
activity = kwargs.get(ATTR_ACTIVITY, "")
|
||||
if activity:
|
||||
self._send_launch_app_command(activity)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the Android TV off."""
|
||||
if self.is_on:
|
||||
self._send_key_command("POWER")
|
||||
|
||||
async def async_send_command(self, command: Iterable[str], **kwargs: Any) -> None:
|
||||
"""Send commands to one device."""
|
||||
num_repeats = kwargs.get(ATTR_NUM_REPEATS, DEFAULT_NUM_REPEATS)
|
||||
delay_secs = kwargs.get(ATTR_DELAY_SECS, DEFAULT_DELAY_SECS)
|
||||
hold_secs = kwargs.get(ATTR_HOLD_SECS, DEFAULT_HOLD_SECS)
|
||||
|
||||
for _ in range(num_repeats):
|
||||
for single_command in command:
|
||||
if hold_secs:
|
||||
self._send_key_command(single_command, "START_LONG")
|
||||
await asyncio.sleep(hold_secs)
|
||||
self._send_key_command(single_command, "END_LONG")
|
||||
else:
|
||||
self._send_key_command(single_command, "SHORT")
|
||||
await asyncio.sleep(delay_secs)
|
||||
|
||||
def _send_key_command(self, key_code: str, direction: str = "SHORT") -> None:
|
||||
"""Send a key press to Android TV.
|
||||
|
||||
This does not block; it buffers the data and arranges for it to be sent out asynchronously.
|
||||
"""
|
||||
try:
|
||||
self._api.send_key_command(key_code, direction)
|
||||
except ConnectionClosed as exc:
|
||||
raise HomeAssistantError(
|
||||
"Connection to Android TV device is closed"
|
||||
) from exc
|
||||
|
||||
def _send_launch_app_command(self, app_link: str) -> None:
|
||||
"""Launch an app on Android TV.
|
||||
|
||||
This does not block; it buffers the data and arranges for it to be sent out asynchronously.
|
||||
"""
|
||||
try:
|
||||
self._api.send_launch_app_command(app_link)
|
||||
except ConnectionClosed as exc:
|
||||
raise HomeAssistantError(
|
||||
"Connection to Android TV device is closed"
|
||||
) from exc
|
||||
@@ -1,38 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"flow_title": "{name}",
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Enter the IP address of the Android TV you want to add to Home Assistant. It will turn on and a pairing code will be displayed on it that you will need to enter in the next screen.",
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]"
|
||||
}
|
||||
},
|
||||
"zeroconf_confirm": {
|
||||
"title": "Discovered Android TV",
|
||||
"description": "Do you want to add the Android TV ({name}) to Home Assistant? It will turn on and a pairing code will be displayed on it that you will need to enter in the next screen."
|
||||
},
|
||||
"pair": {
|
||||
"description": "Enter the pairing code displayed on the Android TV ({name}).",
|
||||
"data": {
|
||||
"pin": "[%key:common::config_flow::data::pin%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
"description": "You need to pair again with the Android TV ({name})."
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,5 @@
|
||||
"""Rest API for Home Assistant."""
|
||||
import asyncio
|
||||
from functools import lru_cache
|
||||
from http import HTTPStatus
|
||||
import logging
|
||||
|
||||
@@ -351,12 +350,6 @@ class APIComponentsView(HomeAssistantView):
|
||||
return self.json(request.app["hass"].config.components)
|
||||
|
||||
|
||||
@lru_cache
|
||||
def _cached_template(template_str: str, hass: ha.HomeAssistant) -> template.Template:
|
||||
"""Return a cached template."""
|
||||
return template.Template(template_str, hass)
|
||||
|
||||
|
||||
class APITemplateView(HomeAssistantView):
|
||||
"""View to handle Template requests."""
|
||||
|
||||
@@ -369,7 +362,7 @@ class APITemplateView(HomeAssistantView):
|
||||
raise Unauthorized()
|
||||
try:
|
||||
data = await request.json()
|
||||
tpl = _cached_template(data["template"], request.app["hass"])
|
||||
tpl = template.Template(data["template"], request.app["hass"])
|
||||
return tpl.async_render(variables=data.get("variables"), parse_result=False)
|
||||
except (ValueError, TemplateError) as ex:
|
||||
return self.json_message(
|
||||
|
||||
@@ -75,7 +75,7 @@ class AuthorizationServer:
|
||||
token_url: str
|
||||
|
||||
|
||||
class ApplicationCredentialsStorageCollection(collection.DictStorageCollection):
|
||||
class ApplicationCredentialsStorageCollection(collection.StorageCollection):
|
||||
"""Application credential collection stored in storage."""
|
||||
|
||||
CREATE_SCHEMA = vol.Schema(CREATE_FIELDS)
|
||||
@@ -94,7 +94,7 @@ class ApplicationCredentialsStorageCollection(collection.DictStorageCollection):
|
||||
return f"{info[CONF_DOMAIN]}.{info[CONF_CLIENT_ID]}"
|
||||
|
||||
async def _update_data(
|
||||
self, item: dict[str, str], update_data: dict[str, str]
|
||||
self, data: dict[str, str], update_data: dict[str, str]
|
||||
) -> dict[str, str]:
|
||||
"""Return a new updated data object."""
|
||||
raise ValueError("Updates not supported")
|
||||
@@ -144,6 +144,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
id_manager = collection.IDManager()
|
||||
storage_collection = ApplicationCredentialsStorageCollection(
|
||||
Store(hass, STORAGE_VERSION, STORAGE_KEY),
|
||||
logging.getLogger(f"{__name__}.storage_collection"),
|
||||
id_manager,
|
||||
)
|
||||
await storage_collection.async_load()
|
||||
|
||||
@@ -13,7 +13,7 @@ from homeassistant.const import (
|
||||
CONF_TYPE,
|
||||
)
|
||||
from homeassistant.core import CALLBACK_TYPE, Event, HassJob, HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry
|
||||
from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
@@ -32,11 +32,11 @@ async def async_get_triggers(
|
||||
hass: HomeAssistant, device_id: str
|
||||
) -> list[dict[str, str]]:
|
||||
"""List device triggers for Arcam FMJ Receiver control devices."""
|
||||
registry = er.async_get(hass)
|
||||
registry = entity_registry.async_get(hass)
|
||||
triggers = []
|
||||
|
||||
# Get all the integrations entities for this device
|
||||
for entry in er.async_entries_for_device(registry, device_id):
|
||||
for entry in entity_registry.async_entries_for_device(registry, device_id):
|
||||
if entry.domain == "media_player":
|
||||
triggers.append(
|
||||
{
|
||||
|
||||
@@ -180,7 +180,7 @@ class ArestData:
|
||||
self._resource = resource
|
||||
self._pin = pin
|
||||
self.data = {}
|
||||
self.available = True
|
||||
self._attr_available = True
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
@@ -201,7 +201,7 @@ class ArestData:
|
||||
f"{self._resource}/digital/{self._pin}", timeout=10
|
||||
)
|
||||
self.data = {"value": response.json()["return_value"]}
|
||||
self.available = True
|
||||
self._attr_available = True
|
||||
except requests.exceptions.ConnectionError:
|
||||
_LOGGER.error("No route to device %s", self._resource)
|
||||
self.available = False
|
||||
self._attr_available = False
|
||||
|
||||
@@ -33,7 +33,7 @@ def get_scanner(hass: HomeAssistant, config: ConfigType) -> ArrisDeviceScanner:
|
||||
|
||||
|
||||
class ArrisDeviceScanner(DeviceScanner):
|
||||
"""Class which queries a Arris TG2492LG router for connected devices."""
|
||||
"""This class queries a Arris TG2492LG router for connected devices."""
|
||||
|
||||
def __init__(self, connect_box: ConnectBox) -> None:
|
||||
"""Initialize the scanner."""
|
||||
|
||||
@@ -42,7 +42,7 @@ def get_scanner(hass: HomeAssistant, config: ConfigType) -> ArubaDeviceScanner |
|
||||
|
||||
|
||||
class ArubaDeviceScanner(DeviceScanner):
|
||||
"""Class which queries a Aruba Access Point for connected devices."""
|
||||
"""This class queries a Aruba Access Point for connected devices."""
|
||||
|
||||
def __init__(self, config):
|
||||
"""Initialize the scanner."""
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Support for collecting data from the ARWN project."""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
||||
from homeassistant.components import mqtt
|
||||
@@ -10,7 +11,6 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import slugify
|
||||
from homeassistant.util.json import json_loads_object
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -102,7 +102,7 @@ async def async_setup_platform(
|
||||
"""Set up the ARWN platform."""
|
||||
|
||||
@callback
|
||||
def async_sensor_event_received(msg: mqtt.ReceiveMessage) -> None:
|
||||
def async_sensor_event_received(msg):
|
||||
"""Process events as sensors.
|
||||
|
||||
When a new event on our topic (arwn/#) is received we map it
|
||||
@@ -115,7 +115,7 @@ async def async_setup_platform(
|
||||
This lets us dynamically incorporate sensors without any
|
||||
configuration on our side.
|
||||
"""
|
||||
event = json_loads_object(msg.payload)
|
||||
event = json.loads(msg.payload)
|
||||
sensors = discover_sensors(msg.topic, event)
|
||||
if not sensors:
|
||||
return
|
||||
|
||||
@@ -28,5 +28,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/august",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pubnub", "yalexs"],
|
||||
"requirements": ["yalexs==1.2.7", "yalexs-ble==2.1.14"]
|
||||
"requirements": ["yalexs==1.2.7", "yalexs_ble==2.0.4"]
|
||||
}
|
||||
|
||||
@@ -38,10 +38,7 @@ class AugustSubscriberMixin:
|
||||
def _async_setup_listeners(self):
|
||||
"""Create interval and stop listeners."""
|
||||
self._unsub_interval = async_track_time_interval(
|
||||
self._hass,
|
||||
self._async_refresh,
|
||||
self._update_interval,
|
||||
name="august refresh",
|
||||
self._hass, self._async_refresh, self._update_interval
|
||||
)
|
||||
|
||||
@callback
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aurora",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["auroranoaa"],
|
||||
"requirements": ["auroranoaa==0.0.3"]
|
||||
"requirements": ["auroranoaa==0.0.2"]
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
"""Support for Aurora Forecast sensor."""
|
||||
from homeassistant.components.sensor import SensorEntity, SensorStateClass
|
||||
from homeassistant.components.sensor import SensorEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import PERCENTAGE
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -28,7 +28,6 @@ class AuroraSensor(AuroraEntity, SensorEntity):
|
||||
"""Implementation of an aurora sensor."""
|
||||
|
||||
_attr_native_unit_of_measurement = PERCENTAGE
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
|
||||
@@ -1,35 +1,9 @@
|
||||
{
|
||||
"title": "Automation",
|
||||
"entity_component": {
|
||||
"state": {
|
||||
"_": {
|
||||
"name": "[%key:component::automation::title%]",
|
||||
"state": {
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]"
|
||||
},
|
||||
"state_attributes": {
|
||||
"current": {
|
||||
"name": "Running automations"
|
||||
},
|
||||
"id": {
|
||||
"name": "ID"
|
||||
},
|
||||
"last_triggered": {
|
||||
"name": "Last triggered"
|
||||
},
|
||||
"max": {
|
||||
"name": "Max running automations"
|
||||
},
|
||||
"mode": {
|
||||
"name": "Run mode",
|
||||
"state": {
|
||||
"parallel": "Parallel",
|
||||
"queued": "Queued",
|
||||
"restart": "Restart",
|
||||
"single": "Single"
|
||||
}
|
||||
}
|
||||
}
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Support for Awair sensors."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, cast
|
||||
from typing import cast
|
||||
|
||||
from python_awair.air_data import AirData
|
||||
from python_awair.devices import AwairBaseDevice, AwairLocalDevice
|
||||
@@ -156,7 +156,7 @@ class AwairSensor(CoordinatorEntity[AwairDataUpdateCoordinator], SensorEntity):
|
||||
return round(state, 2)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
def extra_state_attributes(self) -> dict:
|
||||
"""Return the Awair Index alongside state attributes.
|
||||
|
||||
The Awair Index is a subjective score ranging from 0-4 (inclusive) that
|
||||
@@ -178,7 +178,7 @@ class AwairSensor(CoordinatorEntity[AwairDataUpdateCoordinator], SensorEntity):
|
||||
https://docs.developer.getawair.com/?version=latest#awair-score-and-index
|
||||
"""
|
||||
sensor_type = self.entity_description.key
|
||||
attrs: dict[str, Any] = {}
|
||||
attrs: dict = {}
|
||||
if not self._air_data:
|
||||
return attrs
|
||||
if sensor_type in self._air_data.indices:
|
||||
|
||||
@@ -51,6 +51,7 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
|
||||
if config_entry.version != 3:
|
||||
# Home Assistant 2023.2
|
||||
config_entry.version = 3
|
||||
hass.config_entries.async_update_entry(config_entry)
|
||||
|
||||
_LOGGER.info("Migration to version %s successful", config_entry.version)
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ from pathlib import Path
import tarfile
from tarfile import TarError
from tempfile import TemporaryDirectory
from typing import Any, Protocol, cast
from typing import Any, Protocol

from securetar import SecureTarFile, atomic_contents_add

@@ -19,12 +19,9 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import integration_platform
from homeassistant.helpers.json import save_json
from homeassistant.util import dt
from homeassistant.util.json import json_loads_object

from .const import DOMAIN, EXCLUDE_FROM_BACKUP, LOGGER

BUF_SIZE = 2**20 * 4 # 4MB

@dataclass
class Backup:
@@ -101,13 +98,13 @@ class BackupManager:
backups: dict[str, Backup] = {}
for backup_path in self.backup_dir.glob("*.tar"):
try:
with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file:
with tarfile.open(backup_path, "r:") as backup_file:
if data_file := backup_file.extractfile("./backup.json"):
data = json_loads_object(data_file.read())
data = json.loads(data_file.read())
backup = Backup(
slug=cast(str, data["slug"]),
name=cast(str, data["name"]),
date=cast(str, data["date"]),
slug=data["slug"],
name=data["name"],
date=data["date"],
path=backup_path,
size=round(backup_path.stat().st_size / 1_048_576, 2),
)
@@ -189,8 +186,13 @@ class BackupManager:
"compressed": True,
}
tar_file_path = Path(self.backup_dir, f"{backup_data['slug']}.tar")
size_in_bytes = await self.hass.async_add_executor_job(
self._mkdir_and_generate_backup_contents,

if not self.backup_dir.exists():
LOGGER.debug("Creating backup directory")
self.hass.async_add_executor_job(self.backup_dir.mkdir)

await self.hass.async_add_executor_job(
self._generate_backup_contents,
tar_file_path,
backup_data,
)
@@ -199,7 +201,7 @@ class BackupManager:
name=backup_name,
date=date_str,
path=tar_file_path,
size=round(size_in_bytes / 1_048_576, 2),
size=round(tar_file_path.stat().st_size / 1_048_576, 2),
)
if self.loaded_backups:
self.backups[slug] = backup
@@ -218,18 +220,14 @@ class BackupManager:
if isinstance(result, Exception):
raise result

def _mkdir_and_generate_backup_contents(
def _generate_backup_contents(
self,
tar_file_path: Path,
backup_data: dict[str, Any],
) -> int:
"""Generate backup contents and return the size."""
if not self.backup_dir.exists():
LOGGER.debug("Creating backup directory")
self.backup_dir.mkdir()

) -> None:
"""Generate backup contents."""
with TemporaryDirectory() as tmp_dir, SecureTarFile(
tar_file_path, "w", gzip=False, bufsize=BUF_SIZE
tar_file_path, "w", gzip=False
) as tar_file:
tmp_dir_path = Path(tmp_dir)
save_json(
@@ -239,7 +237,6 @@ class BackupManager:
with SecureTarFile(
tmp_dir_path.joinpath("./homeassistant.tar.gz").as_posix(),
"w",
bufsize=BUF_SIZE,
) as core_tar:
atomic_contents_add(
tar_file=core_tar,
@@ -248,7 +245,6 @@ class BackupManager:
arcname="data",
)
tar_file.add(tmp_dir_path, arcname=".")
return tar_file_path.stat().st_size

def _generate_slug(date: str, name: str) -> str:

@@ -7,5 +7,5 @@
"integration_type": "system",
"iot_class": "calculated",
"quality_scale": "internal",
"requirements": ["securetar==2023.3.0"]
"requirements": ["securetar==2022.2.0"]
}

@@ -5,7 +5,7 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/baf",
"iot_class": "local_push",
"requirements": ["aiobafi6==0.8.0"],
"requirements": ["aiobafi6==0.7.3"],
"zeroconf": [
{
"type": "_api._tcp.local.",

@@ -39,7 +39,6 @@ AUTO_COMFORT_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="comfort_min_speed",
name="Auto Comfort Minimum Speed",
native_step=1,
native_min_value=0,
native_max_value=SPEED_RANGE[1] - 1,
entity_category=EntityCategory.CONFIG,
@@ -49,7 +48,6 @@ AUTO_COMFORT_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="comfort_max_speed",
name="Auto Comfort Maximum Speed",
native_step=1,
native_min_value=1,
native_max_value=SPEED_RANGE[1],
entity_category=EntityCategory.CONFIG,
@@ -59,7 +57,6 @@ AUTO_COMFORT_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="comfort_heat_assist_speed",
name="Auto Comfort Heat Assist Speed",
native_step=1,
native_min_value=SPEED_RANGE[0],
native_max_value=SPEED_RANGE[1],
entity_category=EntityCategory.CONFIG,
@@ -72,7 +69,6 @@ FAN_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="return_to_auto_timeout",
name="Return to Auto Timeout",
native_step=1,
native_min_value=ONE_MIN_SECS,
native_max_value=HALF_DAY_SECS,
entity_category=EntityCategory.CONFIG,
@@ -83,7 +79,6 @@ FAN_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="motion_sense_timeout",
name="Motion Sense Timeout",
native_step=1,
native_min_value=ONE_MIN_SECS,
native_max_value=ONE_DAY_SECS,
entity_category=EntityCategory.CONFIG,
@@ -97,7 +92,6 @@ LIGHT_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="light_return_to_auto_timeout",
name="Light Return to Auto Timeout",
native_step=1,
native_min_value=ONE_MIN_SECS,
native_max_value=HALF_DAY_SECS,
entity_category=EntityCategory.CONFIG,
@@ -108,7 +102,6 @@ LIGHT_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="light_auto_motion_timeout",
name="Light Motion Sense Timeout",
native_step=1,
native_min_value=ONE_MIN_SECS,
native_max_value=ONE_DAY_SECS,
entity_category=EntityCategory.CONFIG,

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/balboa",
"iot_class": "local_push",
"loggers": ["pybalboa"],
"requirements": ["pybalboa==1.0.1"]
"requirements": ["pybalboa==1.0.0"]
}

@@ -60,7 +60,7 @@ from .const import (
DEFAULT_PROBABILITY_THRESHOLD,
)
from .helpers import Observation
from .issues import raise_mirrored_entries, raise_no_prob_given_false
from .repairs import raise_mirrored_entries, raise_no_prob_given_false

_LOGGER = logging.getLogger(__name__)

@@ -1,8 +1,8 @@
"""Helpers for generating issues."""
"""Helpers for generating repairs."""
from __future__ import annotations

from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers import issue_registry

from . import DOMAIN
from .helpers import Observation
@@ -15,13 +15,13 @@ def raise_mirrored_entries(
if len(observations) != 2:
return
if observations[0].is_mirror(observations[1]):
ir.async_create_issue(
issue_registry.async_create_issue(
hass,
DOMAIN,
"mirrored_entry/" + text,
breaks_in_ha_version="2022.10.0",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
severity=issue_registry.IssueSeverity.WARNING,
translation_key="manual_migration",
translation_placeholders={"entity": text},
learn_more_url="https://github.com/home-assistant/core/pull/67631",
@@ -31,13 +31,13 @@ def raise_mirrored_entries(
# Should deprecate in some future version (2022.10 at time of writing) & make prob_given_false required in schemas.
def raise_no_prob_given_false(hass: HomeAssistant, text: str) -> None:
"""In previous 2022.9 and earlier, prob_given_false was optional and had a default version."""
ir.async_create_issue(
issue_registry.async_create_issue(
hass,
DOMAIN,
f"no_prob_given_false/{text}",
breaks_in_ha_version="2022.10.0",
is_fixable=False,
severity=ir.IssueSeverity.ERROR,
severity=issue_registry.IssueSeverity.ERROR,
translation_key="no_prob_given_false",
translation_placeholders={"entity": text},
learn_more_url="https://github.com/home-assistant/core/pull/67631",
@@ -42,7 +42,7 @@ Device = namedtuple("Device", ["mac", "name", "ip", "last_update"])


class BboxDeviceScanner(DeviceScanner):
"""Scanner for devices connected to the bbox."""
"""This class scans for devices connected to the bbox."""

def __init__(self, config):
"""Get host from config."""

@@ -106,195 +106,114 @@
"turned_off": "{entity_name} turned off"
}
},
"entity_component": {
"_": {
"name": "[%key:component::binary_sensor::title%]",
"state": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
},
"state": {
"battery": {
"name": "Battery",
"state": {
"off": "Normal",
"on": "Low"
}
"off": "Normal",
"on": "Low"
},
"battery_charging": {
"name": "Charging",
"state": {
"off": "Not charging",
"on": "Charging"
}
"off": "Not charging",
"on": "Charging"
},
"carbon_monoxide": {
|
||||
"name": "Carbon monoxide",
|
||||
"state": {
|
||||
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
|
||||
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
|
||||
}
|
||||
"off": "[%key:component::binary_sensor::state::gas::off%]",
|
||||
"on": "[%key:component::binary_sensor::state::gas::on%]"
|
||||
},
|
||||
"cold": {
|
||||
"name": "Cold",
|
||||
"state": {
|
||||
"off": "[%key:component::binary_sensor::entity_component::battery::state::off%]",
|
||||
"on": "Cold"
|
||||
}
|
||||
"off": "[%key:component::binary_sensor::state::battery::off%]",
|
||||
"on": "Cold"
|
||||
},
|
||||
"connectivity": {
|
||||
"name": "Connectivity",
|
||||
"state": {
|
||||
"off": "[%key:common::state::disconnected%]",
|
||||
"on": "[%key:common::state::connected%]"
|
||||
}
|
||||
"off": "[%key:common::state::disconnected%]",
|
||||
"on": "[%key:common::state::connected%]"
|
||||
},
|
||||
"door": {
|
||||
"name": "Door",
|
||||
"state": {
|
||||
"off": "[%key:common::state::closed%]",
|
||||
"on": "[%key:common::state::open%]"
|
||||
}
|
||||
"off": "[%key:common::state::closed%]",
|
||||
"on": "[%key:common::state::open%]"
|
||||
},
|
||||
"garage_door": {
|
||||
"name": "Garage door",
|
||||
"state": {
|
||||
"off": "[%key:common::state::closed%]",
|
||||
"on": "[%key:common::state::open%]"
|
||||
}
|
||||
"off": "[%key:common::state::closed%]",
|
||||
"on": "[%key:common::state::open%]"
|
||||
},
|
||||
"gas": {
|
||||
"name": "Gas",
|
||||
"state": {
|
||||
"off": "Clear",
|
||||
"on": "Detected"
|
||||
}
|
||||
"off": "Clear",
|
||||
"on": "Detected"
|
||||
},
|
||||
"heat": {
|
||||
"name": "Heat",
|
||||
"state": {
|
||||
"off": "[%key:component::binary_sensor::entity_component::battery::state::off%]",
|
||||
"on": "Hot"
|
||||
}
|
||||
"off": "[%key:component::binary_sensor::state::battery::off%]",
|
||||
"on": "Hot"
|
||||
},
|
||||
"light": {
|
||||
"name": "Light",
|
||||
"state": {
|
||||
"off": "No light",
|
||||
"on": "Light detected"
|
||||
}
|
||||
"off": "No light",
|
||||
"on": "Light detected"
|
||||
},
|
||||
"lock": {
|
||||
"name": "Lock",
|
||||
"state": {
|
||||
"off": "[%key:common::state::locked%]",
|
||||
"on": "[%key:common::state::unlocked%]"
|
||||
}
|
||||
"off": "[%key:common::state::locked%]",
|
||||
"on": "[%key:common::state::unlocked%]"
|
||||
},
|
||||
"moisture": {
|
||||
"name": "Moisture",
|
||||
"state": {
|
||||
"off": "Dry",
|
||||
"on": "Wet"
|
||||
}
|
||||
"off": "Dry",
|
||||
"on": "Wet"
|
||||
},
|
||||
"motion": {
|
||||
"name": "Motion",
|
||||
"state": {
|
||||
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
|
||||
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
|
||||
}
|
||||
"off": "[%key:component::binary_sensor::state::gas::off%]",
|
||||
"on": "[%key:component::binary_sensor::state::gas::on%]"
|
||||
},
|
||||
"moving": {
|
||||
"name": "Moving",
|
||||
"state": {
|
||||
"off": "Not moving",
|
||||
"on": "Moving"
|
||||
}
|
||||
"off": "Not moving",
|
||||
"on": "Moving"
|
||||
},
|
||||
"occupancy": {
|
||||
"name": "Occupancy",
|
||||
"state": {
|
||||
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
|
||||
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
|
||||
}
|
||||
"off": "[%key:component::binary_sensor::state::gas::off%]",
|
||||
"on": "[%key:component::binary_sensor::state::gas::on%]"
|
||||
},
|
||||
"opening": {
|
||||
"name": "Opening",
|
||||
"state": {
|
||||
"off": "[%key:common::state::closed%]",
|
||||
"on": "[%key:common::state::open%]"
|
||||
}
|
||||
"off": "[%key:common::state::closed%]",
|
||||
"on": "[%key:common::state::open%]"
|
||||
},
|
||||
"plug": {
|
||||
"name": "Plug",
|
||||
"state": {
|
||||
"off": "Unplugged",
|
||||
"on": "Plugged in"
|
||||
}
|
||||
"off": "Unplugged",
|
||||
"on": "Plugged in"
|
||||
},
|
||||
"presence": {
|
||||
"name": "Presence",
|
||||
"state": {
|
||||
"off": "[%key:component::device_tracker::entity_component::_::state::not_home%]",
|
||||
"on": "[%key:component::device_tracker::entity_component::_::state::home%]"
|
||||
}
|
||||
"off": "[%key:component::device_tracker::state::_::not_home%]",
|
||||
"on": "[%key:component::device_tracker::state::_::home%]"
|
||||
},
|
||||
"problem": {
|
||||
"name": "Problem",
|
||||
"state": {
|
||||
"off": "OK",
|
||||
"on": "Problem"
|
||||
}
|
||||
"off": "OK",
|
||||
"on": "Problem"
|
||||
},
|
||||
"running": {
|
||||
"name": "Running",
|
||||
"state": {
|
||||
"off": "Not running",
|
||||
"on": "Running"
|
||||
}
|
||||
"off": "Not running",
|
||||
"on": "Running"
|
||||
},
|
||||
"safety": {
|
||||
"name": "Safety",
|
||||
"state": {
|
||||
"off": "Safe",
|
||||
"on": "Unsafe"
|
||||
}
|
||||
"off": "Safe",
|
||||
"on": "Unsafe"
|
||||
},
|
||||
"smoke": {
|
||||
"name": "Smoke",
|
||||
"state": {
|
||||
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
|
||||
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
|
||||
}
|
||||
"off": "[%key:component::binary_sensor::state::gas::off%]",
|
||||
"on": "[%key:component::binary_sensor::state::gas::on%]"
|
||||
},
|
||||
"sound": {
|
||||
"name": "Sound",
|
||||
"state": {
|
||||
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
|
||||
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
|
||||
}
|
||||
"off": "[%key:component::binary_sensor::state::gas::off%]",
|
||||
"on": "[%key:component::binary_sensor::state::gas::on%]"
|
||||
},
|
||||
"update": {
|
||||
"name": "Update",
|
||||
"state": {
|
||||
"off": "Up-to-date",
|
||||
"on": "Update available"
|
||||
}
|
||||
"off": "Up-to-date",
|
||||
"on": "Update available"
|
||||
},
|
||||
"vibration": {
|
||||
"name": "Vibration",
|
||||
"state": {
|
||||
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
|
||||
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
|
||||
}
|
||||
"off": "[%key:component::binary_sensor::state::gas::off%]",
|
||||
"on": "[%key:component::binary_sensor::state::gas::on%]"
|
||||
},
|
||||
"window": {
|
||||
"name": "Window",
|
||||
"state": {
|
||||
"off": "[%key:common::state::closed%]",
|
||||
"on": "[%key:common::state::open%]"
|
||||
}
|
||||
"off": "[%key:common::state::closed%]",
|
||||
"on": "[%key:common::state::open%]"
|
||||
},
|
||||
"_": {
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]"
|
||||
}
|
||||
},
|
||||
"device_class": {
|
||||
|
||||
@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/blackbird",
"iot_class": "local_polling",
"loggers": ["pyblackbird"],
"requirements": ["pyblackbird==0.6"]
"requirements": ["pyblackbird==0.5"]
}

@@ -84,7 +84,7 @@ class BleBoxConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
) -> FlowResult:
"""Handle zeroconf discovery."""
hass = self.hass
ipaddress = (discovery_info.host, discovery_info.port)
ipaddress = host_port(discovery_info.__dict__)
self.device_config["host"] = discovery_info.host
self.device_config["port"] = discovery_info.port

@@ -8,7 +8,6 @@ import blebox_uniapi.cover

from homeassistant.components.cover import (
ATTR_POSITION,
ATTR_TILT_POSITION,
CoverDeviceClass,
CoverEntity,
CoverEntityFeature,
@@ -68,10 +67,6 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity):
self._attr_supported_features = (
position | stop | CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
)
if feature.has_tilt:
self._attr_supported_features = (
self._attr_supported_features | CoverEntityFeature.SET_TILT_POSITION
)

@property
def current_cover_position(self) -> int | None:
@@ -82,12 +77,6 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity):

return None if position is None else 100 - position

@property
def current_cover_tilt_position(self) -> int | None:
"""Return the current tilt of shutter."""
position = self._feature.tilt_current
return None if position is None else 100 - position

@property
def is_opening(self) -> bool | None:
"""Return whether cover is opening."""
@@ -121,12 +110,6 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity):
"""Stop the cover."""
await self._feature.async_stop()

async def async_set_cover_tilt_position(self, **kwargs: Any) -> None:
"""Set the tilt position."""

position = kwargs[ATTR_TILT_POSITION]
await self._feature.async_set_tilt_position(100 - position)

def _is_state(self, state_name) -> bool | None:
value = BLEBOX_TO_HASS_COVER_STATES[self._feature.state]
return None if value is None else value == state_name

@@ -143,7 +143,7 @@ class ActiveBluetoothDataUpdateCoordinator(
self._last_poll = monotonic_time_coarse()

if not self.last_poll_successful:
self.logger.debug("%s: Polling recovered", self.address)
self.logger.debug("%s: Polling recovered")
self.last_poll_successful = True

self._async_handle_bluetooth_poll()
Some files were not shown because too many files have changed in this diff.