Mirror of https://github.com/home-assistant/core.git (synced 2025-09-23 11:59:37 +00:00)
Compare commits: 2024.4.0b7...bump-openc (1 commit)
Commit: 628cc5dccb
.coveragerc (28 changed lines)
@@ -202,7 +202,6 @@ omit =
homeassistant/components/control4/__init__.py
homeassistant/components/control4/director_utils.py
homeassistant/components/control4/light.py
homeassistant/components/control4/media_player.py
homeassistant/components/coolmaster/coordinator.py
homeassistant/components/cppm_tracker/device_tracker.py
homeassistant/components/crownstone/__init__.py
@@ -251,7 +250,7 @@ omit =
homeassistant/components/dormakaba_dkey/lock.py
homeassistant/components/dormakaba_dkey/sensor.py
homeassistant/components/dovado/*
homeassistant/components/downloader/__init__.py
homeassistant/components/downloader/*
homeassistant/components/dsmr_reader/__init__.py
homeassistant/components/dsmr_reader/definitions.py
homeassistant/components/dsmr_reader/sensor.py
@@ -462,10 +461,6 @@ omit =
homeassistant/components/frontier_silicon/browse_media.py
homeassistant/components/frontier_silicon/media_player.py
homeassistant/components/futurenow/light.py
homeassistant/components/fyta/__init__.py
homeassistant/components/fyta/coordinator.py
homeassistant/components/fyta/entity.py
homeassistant/components/fyta/sensor.py
homeassistant/components/garadget/cover.py
homeassistant/components/garages_amsterdam/__init__.py
homeassistant/components/garages_amsterdam/binary_sensor.py
@@ -550,6 +545,10 @@ omit =
homeassistant/components/homematic/notify.py
homeassistant/components/homematic/sensor.py
homeassistant/components/homematic/switch.py
homeassistant/components/homeworks/__init__.py
homeassistant/components/homeworks/binary_sensor.py
homeassistant/components/homeworks/button.py
homeassistant/components/homeworks/light.py
homeassistant/components/horizon/media_player.py
homeassistant/components/hp_ilo/sensor.py
homeassistant/components/huawei_lte/__init__.py
@@ -748,6 +747,7 @@ omit =
homeassistant/components/lyric/climate.py
homeassistant/components/lyric/sensor.py
homeassistant/components/mailgun/notify.py
homeassistant/components/map/*
homeassistant/components/mastodon/notify.py
homeassistant/components/matrix/__init__.py
homeassistant/components/matrix/notify.py
@@ -776,11 +776,9 @@ omit =
homeassistant/components/microbees/__init__.py
homeassistant/components/microbees/api.py
homeassistant/components/microbees/application_credentials.py
homeassistant/components/microbees/binary_sensor.py
homeassistant/components/microbees/button.py
homeassistant/components/microbees/const.py
homeassistant/components/microbees/coordinator.py
homeassistant/components/microbees/cover.py
homeassistant/components/microbees/entity.py
homeassistant/components/microbees/light.py
homeassistant/components/microbees/sensor.py
@@ -806,11 +804,6 @@ omit =
homeassistant/components/motion_blinds/cover.py
homeassistant/components/motion_blinds/entity.py
homeassistant/components/motion_blinds/sensor.py
homeassistant/components/motionblinds_ble/__init__.py
homeassistant/components/motionblinds_ble/button.py
homeassistant/components/motionblinds_ble/cover.py
homeassistant/components/motionblinds_ble/entity.py
homeassistant/components/motionblinds_ble/select.py
homeassistant/components/motionmount/__init__.py
homeassistant/components/motionmount/binary_sensor.py
homeassistant/components/motionmount/entity.py
@@ -934,6 +927,7 @@ omit =
homeassistant/components/onvif/sensor.py
homeassistant/components/onvif/util.py
homeassistant/components/open_meteo/weather.py
homeassistant/components/opencv/*
homeassistant/components/openevse/sensor.py
homeassistant/components/openexchangerates/__init__.py
homeassistant/components/openexchangerates/coordinator.py
@@ -956,9 +950,7 @@ omit =
homeassistant/components/openuv/binary_sensor.py
homeassistant/components/openuv/coordinator.py
homeassistant/components/openuv/sensor.py
homeassistant/components/openweathermap/__init__.py
homeassistant/components/openweathermap/sensor.py
homeassistant/components/openweathermap/weather.py
homeassistant/components/openweathermap/weather_update_coordinator.py
homeassistant/components/opnsense/__init__.py
homeassistant/components/opower/__init__.py
@@ -1069,7 +1061,6 @@ omit =
homeassistant/components/rabbitair/fan.py
homeassistant/components/rachio/__init__.py
homeassistant/components/rachio/binary_sensor.py
homeassistant/components/rachio/coordinator.py
homeassistant/components/rachio/device.py
homeassistant/components/rachio/entity.py
homeassistant/components/rachio/switch.py
@@ -1142,7 +1133,6 @@ omit =
homeassistant/components/rocketchat/notify.py
homeassistant/components/romy/__init__.py
homeassistant/components/romy/coordinator.py
homeassistant/components/romy/entity.py
homeassistant/components/romy/vacuum.py
homeassistant/components/roomba/__init__.py
homeassistant/components/roomba/binary_sensor.py
@@ -1157,6 +1147,7 @@ omit =
homeassistant/components/roon/media_player.py
homeassistant/components/roon/server.py
homeassistant/components/route53/*
homeassistant/components/rova/sensor.py
homeassistant/components/rpi_camera/*
homeassistant/components/rtorrent/sensor.py
homeassistant/components/ruuvi_gateway/__init__.py
@@ -1295,7 +1286,6 @@ omit =
homeassistant/components/starlink/device_tracker.py
homeassistant/components/starlink/sensor.py
homeassistant/components/starlink/switch.py
homeassistant/components/starlink/time.py
homeassistant/components/starline/__init__.py
homeassistant/components/starline/account.py
homeassistant/components/starline/binary_sensor.py
@@ -1440,7 +1430,6 @@ omit =
homeassistant/components/tolo/number.py
homeassistant/components/tolo/select.py
homeassistant/components/tolo/sensor.py
homeassistant/components/tolo/switch.py
homeassistant/components/toon/__init__.py
homeassistant/components/toon/binary_sensor.py
homeassistant/components/toon/climate.py
@@ -1692,7 +1681,6 @@ omit =
homeassistant/components/yolink/services.py
homeassistant/components/yolink/siren.py
homeassistant/components/yolink/switch.py
homeassistant/components/yolink/valve.py
homeassistant/components/youless/__init__.py
homeassistant/components/youless/sensor.py
homeassistant/components/zabbix/*
.devcontainer/devcontainer.json

@@ -21,7 +21,6 @@
],
// Please keep this file in sync with settings in home-assistant/.vscode/settings.default.json
"settings": {
"python.experiments.optOutFrom": ["pythonTestAdapter"],
"python.pythonPath": "/usr/local/bin/python",
"python.testing.pytestArgs": ["--no-cov"],
"editor.formatOnPaste": false,
.git-blame-ignore-revs

@@ -1,14 +0,0 @@
# Black
4de97abc3aa83188666336ce0a015a5bab75bc8f

# Switch formatting from black to ruff-format (#102893)
706add4a57120a93d7b7fe40e722b00d634c76c2

# Prettify json (component test fixtures) (#68892)
053c4428a933c3c04c22642f93c93fccba3e8bfd

# Prettify json (tests) (#68888)
496d90bf00429d9d924caeb0155edc0bf54e86b9

# Bump ruff to 0.3.4 (#112690)
6bb4e7d62c60389608acf4a7d7dacd8f029307dd
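The file removed above lists formatting-only commits (the Black pass, the black-to-ruff-format switch, the JSON prettify passes, a ruff bump) so that git blame can skip them when attributing lines. A minimal sketch of how such a file is consumed, assuming it sits at the repository root:

# Minimal sketch, assuming a .git-blame-ignore-revs file at the repo root.
# `git blame --ignore-revs-file` skips the listed revisions when attributing lines.
import subprocess

def blame_without_reformat_commits(path: str) -> str:
    """Blame `path` while ignoring the formatting-only revisions."""
    result = subprocess.run(
        ["git", "blame", "--ignore-revs-file", ".git-blame-ignore-revs", path],
        capture_output=True, text=True, check=True,
    )
    return result.stdout

The same effect can be made permanent for a checkout with `git config blame.ignoreRevsFile .git-blame-ignore-revs`.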
.github/workflows/builder.yml (216 changed lines)
@@ -12,8 +12,6 @@ env:
BUILD_TYPE: core
DEFAULT_PYTHON: "3.12"
PIP_TIMEOUT: 60
UV_HTTP_TIMEOUT: 60
UV_SYSTEM_PYTHON: "true"

jobs:
init:
@@ -32,7 +30,7 @@ jobs:
fetch-depth: 0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -51,29 +49,41 @@ jobs:
with:
ignore-dev: true

- name: Fail if translations files are checked in
run: |
if [ -n "$(find homeassistant/components/*/translations -type f)" ]; then
echo "Translations files are checked in, please remove the following files:"
find homeassistant/components/*/translations -type f
exit 1
fi
build_python:
name: Build PyPi package
environment: ${{ needs.init.outputs.channel }}
needs: ["init", "build_base"]
runs-on: ubuntu-latest
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

- name: Download Translations
run: python3 -m script.translations download
env:
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}

- name: Archive translations
- name: Build package
shell: bash
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
run: |
# Remove dist, build, and homeassistant.egg-info
# when build locally for testing!
pip install twine build
python -m build

- name: Upload translations
uses: actions/upload-artifact@v4.3.1
with:
name: translations
path: translations.tar.gz
if-no-files-found: error
- name: Upload package
shell: bash
run: |
export TWINE_USERNAME="__token__"
export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"

twine upload dist/* --skip-existing

build_base:
name: Build ${{ matrix.arch }} base core image
@@ -85,7 +95,6 @@ jobs:
packages: write
id-token: write
strategy:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
@@ -94,7 +103,7 @@ jobs:

- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v3.1.4
uses: dawidd6/action-download-artifact@v3.1.2
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/frontend
@@ -105,7 +114,7 @@ jobs:

- name: Download nightly wheels of intents
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v3.1.4
uses: dawidd6/action-download-artifact@v3.1.2
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/intents-package
@@ -116,20 +125,17 @@ jobs:

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

- name: Adjust nightly version
if: needs.init.outputs.channel == 'dev'
shell: bash
env:
UV_PRERELEASE: allow
run: |
python3 -m pip install "$(grep '^uv' < requirements_test.txt)"
uv pip install packaging tomli
uv pip install .
python3 script/version_bump.py nightly --set-nightly-version "${{ needs.init.outputs.version }}"
python3 -m pip install packaging tomli
python3 -m pip install .
version="$(python3 script/version_bump.py nightly)"

if [[ "$(ls home_assistant_frontend*.whl)" =~ ^home_assistant_frontend-(.*)-py3-none-any.whl$ ]]; then
echo "Found frontend wheel, setting version to: ${BASH_REMATCH[1]}"
@@ -141,7 +147,7 @@ jobs:
sed -i "s|home-assistant-frontend==.*|home-assistant-frontend==${BASH_REMATCH[1]}|" \
homeassistant/package_constraints.txt

sed -i "s|home-assistant-frontend==.*||" requirements_all.txt
python -m script.gen_requirements_all
fi

if [[ "$(ls home_assistant_intents*.whl)" =~ ^home_assistant_intents-(.*)-py3-none-any.whl$ ]]; then
@@ -159,7 +165,7 @@ jobs:
sed -i "s|home-assistant-intents==.*|home-assistant-intents==${BASH_REMATCH[1]}|" \
homeassistant/package_constraints.txt

sed -i "s|home-assistant-intents==.*||" requirements_all.txt
python -m script.gen_requirements_all
fi
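The "Adjust nightly version" step above extracts a version from a downloaded wheel's filename with a bash regex and pins the requirement files to it with sed. A rough Python equivalent of that pinning logic; the helper name and the example wheel in the comment are hypothetical:

# Rough Python equivalent of the bash-regex + sed pinning above; the helper is hypothetical.
import re
from pathlib import Path

def pin_requirement_to_wheel(wheel_name: str, package: str, req_file: str) -> None:
    """Read the version out of e.g. home_assistant_frontend-<ver>-py3-none-any.whl
    and rewrite `package==...` pins in req_file to that version."""
    match = re.fullmatch(rf"{package.replace('-', '_')}-(.+)-py3-none-any\.whl", wheel_name)
    if match is None:
        return
    version = match.group(1)
    path = Path(req_file)
    updated = re.sub(rf"{re.escape(package)}==.*", f"{package}=={version}", path.read_text())
    path.write_text(updated)

# Hypothetical usage:
# pin_requirement_to_wheel("home_assistant_frontend-20240404.0-py3-none-any.whl",
#                          "home-assistant-frontend", "requirements_all.txt")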
- name: Adjustments for armhf
@@ -183,15 +189,10 @@ jobs:
# are not available.
sed -i "s|aiohttp-zlib-ng|aiohttp-zlib-ng\[isal\]|g" requirements_all.txt

- name: Download translations
uses: actions/download-artifact@v4.1.4
with:
name: translations

- name: Extract translations
run: |
tar xvf translations.tar.gz
rm translations.tar.gz
- name: Download Translations
run: python3 -m script.translations download
env:
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}

- name: Write meta info file
shell: bash
@@ -199,14 +200,14 @@ jobs:
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

- name: Login to GitHub Container Registry
uses: docker/login-action@v3.1.0
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build base image
uses: home-assistant/builder@2024.03.5
uses: home-assistant/builder@2024.01.0
with:
args: |
$BUILD_ARGS \
@@ -215,6 +216,17 @@ jobs:
--target /data \
--generic ${{ needs.init.outputs.version }}

- name: Archive translations
shell: bash
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

- name: Upload translations
uses: actions/upload-artifact@v3
with:
name: translations
path: translations.tar.gz
if-no-files-found: error
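The "Archive translations" step builds its file list with find and streams it into tar via "-T -", which tells tar to read the names from stdin. A minimal Python equivalent using only the standard library:

# Minimal Python equivalent of:
#   find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
import tarfile
from pathlib import Path

with tarfile.open("translations.tar.gz", "w:gz") as archive:
    for json_file in sorted(Path("homeassistant/components").glob("*/translations/*.json")):
        archive.add(json_file)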
build_machine:
name: Build ${{ matrix.machine }} machine core image
if: github.repository_owner == 'home-assistant'
@@ -265,14 +277,14 @@ jobs:
fi

- name: Login to GitHub Container Registry
uses: docker/login-action@v3.1.0
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build base image
uses: home-assistant/builder@2024.03.5
uses: home-assistant/builder@2024.01.0
with:
args: |
$BUILD_ARGS \
@@ -324,9 +336,6 @@ jobs:
contents: read
packages: write
id-token: write
strategy:
matrix:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
@@ -334,18 +343,16 @@ jobs:
- name: Install Cosign
uses: sigstore/cosign-installer@v3.4.0
with:
cosign-release: "v2.2.3"
cosign-release: "v2.0.2"

- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
uses: docker/login-action@v3.1.0
uses: docker/login-action@v3.0.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Login to GitHub Container Registry
if: matrix.registry == 'ghcr.io/home-assistant'
uses: docker/login-action@v3.1.0
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -359,37 +366,41 @@ jobs:
function create_manifest() {
local tag_l=${1}
local tag_r=${2}
local registry=${{ matrix.registry }}

docker manifest create "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
"${registry}/i386-homeassistant:${tag_r}" \
"${registry}/armhf-homeassistant:${tag_r}" \
"${registry}/armv7-homeassistant:${tag_r}" \
"${registry}/aarch64-homeassistant:${tag_r}"
for registry in "ghcr.io/home-assistant" "docker.io/homeassistant"
do

docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
--os linux --arch amd64
docker manifest create "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
"${registry}/i386-homeassistant:${tag_r}" \
"${registry}/armhf-homeassistant:${tag_r}" \
"${registry}/armv7-homeassistant:${tag_r}" \
"${registry}/aarch64-homeassistant:${tag_r}"

docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/i386-homeassistant:${tag_r}" \
--os linux --arch 386
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
--os linux --arch amd64

docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armhf-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v6
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/i386-homeassistant:${tag_r}" \
--os linux --arch 386

docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armv7-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v7
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armhf-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v6

docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/aarch64-homeassistant:${tag_r}" \
--os linux --arch arm64 --variant=v8
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armv7-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v7

docker manifest push --purge "${registry}/home-assistant:${tag_l}"
cosign sign --yes "${registry}/home-assistant:${tag_l}"
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/aarch64-homeassistant:${tag_r}" \
--os linux --arch arm64 --variant=v8

docker manifest push --purge "${registry}/home-assistant:${tag_l}"
cosign sign --yes "${registry}/home-assistant:${tag_l}"

done
}
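For reference, create_manifest stitches the five per-architecture images into one multi-arch manifest list, annotates each entry with its platform (os, arch, variant), pushes the list, and signs it with cosign. A condensed sketch of the same fan-out; the arch-to-platform table mirrors the annotate flags above and the helper itself is hypothetical:

# Condensed sketch of the manifest fan-out above; the helper name is hypothetical.
import subprocess

ARCH_PLATFORMS = {
    "amd64": ("amd64", None),
    "i386": ("386", None),
    "armhf": ("arm", "v6"),
    "armv7": ("arm", "v7"),
    "aarch64": ("arm64", "v8"),
}

def create_manifest(registry: str, tag_l: str, tag_r: str) -> None:
    target = f"{registry}/home-assistant:{tag_l}"
    images = [f"{registry}/{arch}-homeassistant:{tag_r}" for arch in ARCH_PLATFORMS]
    # One manifest list referencing all per-arch images.
    subprocess.run(["docker", "manifest", "create", target, *images], check=True)
    # Tag each entry with its platform so clients pull the right image.
    for arch, (platform_arch, variant) in ARCH_PLATFORMS.items():
        cmd = ["docker", "manifest", "annotate", target,
               f"{registry}/{arch}-homeassistant:{tag_r}",
               "--os", "linux", "--arch", platform_arch]
        if variant is not None:
            cmd.append(f"--variant={variant}")
        subprocess.run(cmd, check=True)
    subprocess.run(["docker", "manifest", "push", "--purge", target], check=True)
    subprocess.run(["cosign", "sign", "--yes", target], check=True)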
function validate_image() {
@@ -422,14 +433,12 @@ jobs:
validate_image "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"

if [[ "${{ matrix.registry }}" == "docker.io/homeassistant" ]]; then
# Upload images to dockerhub
push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "i386-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armhf-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armv7-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
fi
# Upload images to dockerhub
push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "i386-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armhf-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armv7-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"

# Create version tag
create_manifest "${{ needs.init.outputs.version }}" "${{ needs.init.outputs.version }}"
@@ -450,44 +459,3 @@ jobs:
v="${{ needs.init.outputs.version }}"
create_manifest "${v%.*}" "${{ needs.init.outputs.version }}"
fi

build_python:
name: Build PyPi package
environment: ${{ needs.init.outputs.channel }}
needs: ["init", "build_base"]
runs-on: ubuntu-latest
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

- name: Download translations
uses: actions/download-artifact@v4.1.4
with:
name: translations

- name: Extract translations
run: |
tar xvf translations.tar.gz
rm translations.tar.gz

- name: Build package
shell: bash
run: |
# Remove dist, build, and homeassistant.egg-info
# when build locally for testing!
pip install twine build
python -m build

- name: Upload package
shell: bash
run: |
export TWINE_USERNAME="__token__"
export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"

twine upload dist/* --skip-existing
.github/workflows/ci.yaml (70 changed lines)
@@ -225,13 +225,13 @@ jobs:
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v4.0.2
uses: actions/cache@v4.0.1
with:
path: venv
key: >-
@@ -243,11 +243,11 @@ jobs:
python -m venv venv
. venv/bin/activate
python --version
pip install "$(grep '^uv' < requirements_test.txt)"
pip install "$(cat requirements_test.txt | grep uv)"
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.0.2
uses: actions/cache@v4.0.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
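Both spellings of the venv bootstrap extract the pinned uv line from requirements_test.txt and hand it to pip; the `grep '^uv' < requirements_test.txt` form just drops the redundant cat and anchors the match to the start of the line. A small sketch of the same extraction:

# Sketch of what grep '^uv' pulls out of requirements_test.txt.
from pathlib import Path

uv_pins = [
    line
    for line in Path("requirements_test.txt").read_text().splitlines()
    if line.startswith("uv")
]
print(uv_pins)  # e.g. ["uv==0.1.24"], the pin the Dockerfile below also uses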
@@ -270,14 +270,14 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.0.2
uses: actions/cache/restore@v4.0.1
with:
path: venv
fail-on-cache-miss: true
@@ -286,7 +286,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.0.2
uses: actions/cache/restore@v4.0.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -310,14 +310,14 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.0.2
uses: actions/cache/restore@v4.0.1
with:
path: venv
fail-on-cache-miss: true
@@ -326,7 +326,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.0.2
uses: actions/cache/restore@v4.0.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -349,14 +349,14 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.0.2
uses: actions/cache/restore@v4.0.1
with:
path: venv
fail-on-cache-miss: true
@@ -365,7 +365,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.0.2
uses: actions/cache/restore@v4.0.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -444,7 +444,7 @@ jobs:
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -455,7 +455,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v4.0.2
uses: actions/cache@v4.0.1
with:
path: venv
lookup-only: true
@@ -464,9 +464,9 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@v4.0.2
uses: actions/cache@v4.0.1
with:
path: ${{ env.UV_CACHE_DIR }}
path: ${{ env.UV_CACHE }}
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
steps.generate-uv-key.outputs.key }}
@@ -493,7 +493,7 @@ jobs:
python -m venv venv
. venv/bin/activate
python --version
pip install "$(grep '^uv' < requirements_test.txt)"
pip install "$(cat requirements_test.txt | grep uv)"
uv pip install -U "pip>=21.3.1" setuptools wheel
uv pip install -r requirements_all.txt
uv pip install -r requirements_test.txt
@@ -513,13 +513,13 @@ jobs:
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.0.2
uses: actions/cache/restore@v4.0.1
with:
path: venv
fail-on-cache-miss: true
@@ -545,13 +545,13 @@ jobs:
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.0.2
uses: actions/cache/restore@v4.0.1
with:
path: venv
fail-on-cache-miss: true
@@ -578,13 +578,13 @@ jobs:
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.0.2
uses: actions/cache/restore@v4.0.1
with:
path: venv
fail-on-cache-miss: true
@@ -622,7 +622,7 @@ jobs:
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -635,7 +635,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.0.2
uses: actions/cache/restore@v4.0.1
with:
path: venv
fail-on-cache-miss: true
@@ -643,7 +643,7 @@ jobs:
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@v4.0.2
uses: actions/cache@v4.0.1
with:
path: .mypy_cache
key: >-
@@ -704,13 +704,13 @@ jobs:
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.0.2
uses: actions/cache/restore@v4.0.1
with:
path: venv
fail-on-cache-miss: true
@@ -851,13 +851,13 @@ jobs:
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.0.2
uses: actions/cache/restore@v4.0.1
with:
path: venv
fail-on-cache-miss: true
@@ -973,13 +973,13 @@ jobs:
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.0.2
uses: actions/cache/restore@v4.0.1
with:
path: venv
fail-on-cache-miss: true
@@ -1070,7 +1070,7 @@ jobs:
pattern: coverage-*
- name: Upload coverage to Codecov (full coverage)
if: needs.info.outputs.test_full_suite == 'true'
uses: Wandalen/wretry.action@v2.1.0
uses: Wandalen/wretry.action@v1.4.10
with:
action: codecov/codecov-action@v3.1.3
with: |
@@ -1081,7 +1081,7 @@ jobs:
attempt_delay: 30000
- name: Upload coverage to Codecov (partial coverage)
if: needs.info.outputs.test_full_suite == 'false'
uses: Wandalen/wretry.action@v2.1.0
uses: Wandalen/wretry.action@v1.4.10
with:
action: codecov/codecov-action@v3.1.3
with: |
.github/workflows/codeql.yml (4 changed lines)
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.1.2

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.24.9
uses: github/codeql-action/init@v3.24.7
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.24.9
uses: github/codeql-action/analyze@v3.24.7
with:
category: "/language:python"
.github/workflows/translations.yml (2 changed lines)
@@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@v4.1.2

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
.pre-commit-config.yaml

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.3.4
rev: v0.2.1
hooks:
- id: ruff
args:
@@ -8,11 +8,11 @@ repos:
- id: ruff-format
files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.py$
- repo: https://github.com/codespell-project/codespell
rev: v2.2.6
rev: v2.2.2
hooks:
- id: codespell
args:
- --ignore-words-list=additionals,alle,alot,astroid,bund,caf,convencional,currenty,datas,farenheit,falsy,fo,frequence,haa,hass,iif,incomfort,ines,ist,nam,nd,pres,pullrequests,resset,rime,ser,serie,te,technik,ue,unsecure,vor,withing,zar
- --ignore-words-list=additionals,alle,alot,bund,currenty,datas,farenheit,falsy,fo,haa,hass,iif,incomfort,ines,ist,nam,nd,pres,pullrequests,resset,rime,ser,serie,te,technik,ue,unsecure,withing,zar
- --skip="./.*,*.csv,*.json,*.ambr"
- --quiet-level=2
exclude_types: [csv, json]
@@ -30,7 +30,7 @@ repos:
- --branch=master
- --branch=rc
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.35.1
rev: v1.32.0
hooks:
- id: yamllint
- repo: https://github.com/pre-commit/mirrors-prettier
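This hunk, apparently the repository's .pre-commit-config.yaml, moves the pinned rev of the ruff, codespell, and yamllint hooks and adjusts the codespell ignore list. A small sketch for listing the pinned hook revisions, assuming PyYAML is available:

# Sketch: list pinned hook revisions from a pre-commit config; assumes PyYAML.
import yaml

with open(".pre-commit-config.yaml") as config_file:
    config = yaml.safe_load(config_file)

for repo in config.get("repos", []):
    print(repo.get("repo"), "->", repo.get("rev"))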
CODEOWNERS (12 changed lines)
@@ -309,8 +309,6 @@ build.json @home-assistant/supervisor
/tests/components/doorbird/ @oblogic7 @bdraco @flacjacket
/homeassistant/components/dormakaba_dkey/ @emontnemery
/tests/components/dormakaba_dkey/ @emontnemery
/homeassistant/components/downloader/ @erwindouna
/tests/components/downloader/ @erwindouna
/homeassistant/components/dremel_3d_printer/ @tkdrob
/tests/components/dremel_3d_printer/ @tkdrob
/homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer
@@ -455,8 +453,6 @@ build.json @home-assistant/supervisor
/tests/components/frontier_silicon/ @wlcrs
/homeassistant/components/fully_kiosk/ @cgarwood
/tests/components/fully_kiosk/ @cgarwood
/homeassistant/components/fyta/ @dontinelli
/tests/components/fyta/ @dontinelli
/homeassistant/components/garages_amsterdam/ @klaasnicolaas
/tests/components/garages_amsterdam/ @klaasnicolaas
/homeassistant/components/gardena_bluetooth/ @elupus
@@ -572,8 +568,8 @@ build.json @home-assistant/supervisor
/tests/components/homekit/ @bdraco
/homeassistant/components/homekit_controller/ @Jc2k @bdraco
/tests/components/homekit_controller/ @Jc2k @bdraco
/homeassistant/components/homematic/ @pvizeli
/tests/components/homematic/ @pvizeli
/homeassistant/components/homematic/ @pvizeli @danielperna84
/tests/components/homematic/ @pvizeli @danielperna84
/homeassistant/components/homewizard/ @DCSBL
/tests/components/homewizard/ @DCSBL
/homeassistant/components/honeywell/ @rdfurman @mkmer
@@ -841,8 +837,6 @@ build.json @home-assistant/supervisor
/tests/components/mopeka/ @bdraco
/homeassistant/components/motion_blinds/ @starkillerOG
/tests/components/motion_blinds/ @starkillerOG
/homeassistant/components/motionblinds_ble/ @LennP @jerrybboy
/tests/components/motionblinds_ble/ @LennP @jerrybboy
/homeassistant/components/motioneye/ @dermotduffy
/tests/components/motioneye/ @dermotduffy
/homeassistant/components/motionmount/ @RJPoelstra
@@ -933,8 +927,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/octoprint/ @rfleming71
/tests/components/octoprint/ @rfleming71
/homeassistant/components/ohmconnect/ @robbiet480
/homeassistant/components/ollama/ @synesthesiam
/tests/components/ollama/ @synesthesiam
/homeassistant/components/ombi/ @larssont
/homeassistant/components/omnilogic/ @oliver84 @djtimca @gentoosu
/tests/components/omnilogic/ @oliver84 @djtimca @gentoosu
Dockerfile (28 changed lines)
@@ -6,47 +6,47 @@ FROM ${BUILD_FROM}

# Synchronize with homeassistant/core.py:async_stop
ENV \
S6_SERVICES_GRACETIME=240000 \
UV_SYSTEM_PYTHON=true
S6_SERVICES_GRACETIME=240000

ARG QEMU_CPU

# Install uv
RUN pip3 install uv==0.1.24

WORKDIR /usr/src

## Setup Home Assistant Core dependencies
COPY requirements.txt homeassistant/
COPY homeassistant/package_constraints.txt homeassistant/homeassistant/
RUN \
uv pip install \
--no-build \
pip3 install \
--only-binary=:all: \
-r homeassistant/requirements.txt

COPY requirements_all.txt home_assistant_frontend-* home_assistant_intents-* homeassistant/
RUN \
if ls homeassistant/home_assistant_*.whl 1> /dev/null 2>&1; then \
uv pip install homeassistant/home_assistant_*.whl; \
if ls homeassistant/home_assistant_frontend*.whl 1> /dev/null 2>&1; then \
pip3 install homeassistant/home_assistant_frontend-*.whl; \
fi \
&& if ls homeassistant/home_assistant_intents*.whl 1> /dev/null 2>&1; then \
pip3 install homeassistant/home_assistant_intents-*.whl; \
fi \
&& if [ "${BUILD_ARCH}" = "i386" ]; then \
LD_PRELOAD="/usr/local/lib/libjemalloc.so.2" \
MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:20000,muzzy_decay_ms:20000" \
linux32 uv pip install \
--no-build \
linux32 pip3 install \
--only-binary=:all: \
-r homeassistant/requirements_all.txt; \
else \
LD_PRELOAD="/usr/local/lib/libjemalloc.so.2" \
MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:20000,muzzy_decay_ms:20000" \
uv pip install \
--no-build \
pip3 install \
--only-binary=:all: \
-r homeassistant/requirements_all.txt; \
fi

## Setup Home Assistant Core
COPY . homeassistant/
RUN \
uv pip install \
pip3 install \
--only-binary=:all: \
-e ./homeassistant \
&& python3 -m compileall \
homeassistant/homeassistant
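The Dockerfile change swaps the installer back and forth between `uv pip install --no-build` and `pip3 install --only-binary=:all:`. Both flags refuse to build sdists, so either way the image only ever installs prebuilt wheels. A minimal sketch contrasting the two equivalent invocations:

# Minimal sketch: the two wheel-only install commands contrasted in the hunk above.
import subprocess

# uv's flag forbidding source builds ...
subprocess.run(["uv", "pip", "install", "--no-build",
                "-r", "homeassistant/requirements.txt"], check=True)
# ... matches pip's wheel-only mode:
subprocess.run(["pip3", "install", "--only-binary=:all:",
                "-r", "homeassistant/requirements.txt"], check=True)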
build.yaml (10 changed lines)
@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.03.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.03.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.03.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.03.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.03.0
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.02.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.02.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.02.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.02.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.02.1
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
homeassistant/auth/mfa_modules/__init__.py

@@ -2,6 +2,7 @@

from __future__ import annotations

import importlib
import logging
import types
from typing import Any
@@ -14,7 +15,6 @@ from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.importlib import async_import_module
from homeassistant.util.decorator import Registry

MULTI_FACTOR_AUTH_MODULES: Registry[str, type[MultiFactorAuthModule]] = Registry()
@@ -149,7 +149,7 @@ async def _load_mfa_module(hass: HomeAssistant, module_name: str) -> types.Modul
module_path = f"homeassistant.auth.mfa_modules.{module_name}"

try:
module = await async_import_module(hass, module_path)
module = importlib.import_module(module_path)
except ImportError as err:
_LOGGER.error("Unable to load mfa module %s: %s", module_name, err)
raise HomeAssistantError(
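The difference in this hunk is between importing the MFA module directly with importlib, which blocks the event loop for the duration of the import, and async_import_module, a helper that runs the import in an executor. A generic sketch of that executor-based pattern using only the standard library:

# Generic sketch of importing a module off the event loop, the pattern
# async_import_module implements.
import asyncio
import importlib
import types

async def import_module_off_loop(module_path: str) -> types.ModuleType:
    """Run the blocking import in the default executor so the loop stays responsive."""
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(None, importlib.import_module, module_path)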
homeassistant/auth/permissions/merge.py

@@ -58,7 +58,10 @@ def _merge_policies(sources: list[CategoryType]) -> CategoryType:
continue
seen.add(key)

key_sources = [src.get(key) for src in sources if isinstance(src, dict)]
key_sources = []
for src in sources:
if isinstance(src, dict):
key_sources.append(src.get(key))

policy[key] = _merge_policies(key_sources)
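Both variants of _merge_policies gather each key's sub-policy from every dict-shaped source before recursing; the list comprehension and the explicit loop are behaviorally identical. A self-contained illustration (the sample policy shapes are illustrative):

# Self-contained illustration: the comprehension and the loop collect the same values.
sources = [
    {"entities": {"all": True}},
    {"entities": {"domains": {"light": True}}},
    None,  # non-dict sources are skipped
]

key_sources = [src.get("entities") for src in sources if isinstance(src, dict)]

key_sources_loop = []
for src in sources:
    if isinstance(src, dict):
        key_sources_loop.append(src.get("entities"))

assert key_sources == key_sources_loop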
homeassistant/auth/providers/__init__.py

@@ -3,6 +3,7 @@
from __future__ import annotations

from collections.abc import Mapping
import importlib
import logging
import types
from typing import Any
@@ -14,7 +15,6 @@ from homeassistant import data_entry_flow, requirements
from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.importlib import async_import_module
from homeassistant.util import dt as dt_util
from homeassistant.util.decorator import Registry

@@ -157,9 +157,7 @@ async def load_auth_provider_module(
) -> types.ModuleType:
"""Load an auth provider."""
try:
module = await async_import_module(
hass, f"homeassistant.auth.providers.{provider}"
)
module = importlib.import_module(f"homeassistant.auth.providers.{provider}")
except ImportError as err:
_LOGGER.error("Unable to load auth provider %s: %s", provider, err)
raise HomeAssistantError(
homeassistant/backports/functools.py

@@ -41,10 +41,12 @@ class cached_property(Generic[_T]):
)

@overload
def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ...
def __get__(self, instance: None, owner: type[Any] | None = None) -> Self:
...

@overload
def __get__(self, instance: Any, owner: type[Any] | None = None) -> _T: ...
def __get__(self, instance: Any, owner: type[Any] | None = None) -> _T:
...

def __get__(
self, instance: Any | None, owner: type[Any] | None = None
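The cached_property change is purely a formatter difference: ruff-format collapses an overload stub's `...` body onto the signature line, while the older style put it on its own line. Both spellings define the same stub, as this small runnable example shows:

# Both overload stubs below are equivalent; only the placement of `...` differs.
from typing import overload

@overload
def first(value: int) -> int: ...  # ruff-format style
@overload
def first(value: str) -> str:
    ...  # pre-ruff style

def first(value):
    """Trivial runtime implementation backing the overloads."""
    return value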
homeassistant/bootstrap.py

@@ -3,13 +3,11 @@
from __future__ import annotations

import asyncio
from collections import defaultdict
import contextlib
from functools import partial
from itertools import chain
from datetime import timedelta
import logging
import logging.handlers
from operator import contains, itemgetter
from operator import itemgetter
import os
import platform
import sys
@@ -34,11 +32,9 @@ from .components import (
api as api_pre_import, # noqa: F401
auth as auth_pre_import, # noqa: F401
config as config_pre_import, # noqa: F401
default_config as default_config_pre_import, # noqa: F401
device_automation as device_automation_pre_import, # noqa: F401
diagnostics as diagnostics_pre_import, # noqa: F401
file_upload as file_upload_pre_import, # noqa: F401
group as group_pre_import, # noqa: F401
history as history_pre_import, # noqa: F401
http, # not named pre_import since it has requirements
image_upload as image_upload_import, # noqa: F401 - not named pre_import since it has requirements
@@ -64,7 +60,6 @@ from .const import (
from .exceptions import HomeAssistantError
from .helpers import (
area_registry,
category_registry,
config_validation as cv,
device_registry,
entity,
@@ -78,13 +73,11 @@ from .helpers import (
translation,
)
from .helpers.dispatcher import async_dispatcher_send
from .helpers.storage import get_internal_store_manager
from .helpers.system_info import async_get_system_info
from .helpers.typing import ConfigType
from .setup import (
BASE_PLATFORMS,
DATA_SETUP_STARTED,
async_get_setup_timings,
DATA_SETUP_TIME,
async_notify_setup_error,
async_set_domains_to_be_loaded,
async_setup_component,
@@ -93,19 +86,11 @@ from .util.async_ import create_eager_task
from .util.logging import async_activate_log_queue_handler
from .util.package import async_get_user_site, is_virtual_env

with contextlib.suppress(ImportError):
# Ensure anyio backend is imported to avoid it being imported in the event loop
from anyio._backends import _asyncio # noqa: F401


if TYPE_CHECKING:
from .runner import RuntimeConfig

_LOGGER = logging.getLogger(__name__)

SETUP_ORDER_SORT_KEY = partial(contains, BASE_PLATFORMS)


ERROR_LOG_FILENAME = "home-assistant.log"

# hass.data key for logging information.
@@ -200,35 +185,16 @@ CRITICAL_INTEGRATIONS = {
"frontend",
}

SETUP_ORDER = (
SETUP_ORDER = {
# Load logging as soon as possible
("logging", LOGGING_INTEGRATIONS),
# Setup frontend and recorder
("frontend, recorder", {*FRONTEND_INTEGRATIONS, *RECORDER_INTEGRATIONS}),
"logging": LOGGING_INTEGRATIONS,
# Setup frontend
"frontend": FRONTEND_INTEGRATIONS,
# Setup recorder
"recorder": RECORDER_INTEGRATIONS,
# Start up debuggers. Start these first in case they want to wait.
("debugger", DEBUGGER_INTEGRATIONS),
)

#
# Storage keys we are likely to load during startup
# in order of when we expect to load them.
#
# If they do not exist they will not be loaded
#
PRELOAD_STORAGE = [
"core.network",
"http.auth",
"image",
"lovelace_dashboards",
"lovelace_resources",
"core.uuid",
"lovelace.map",
"bluetooth.passive_update_processor",
"bluetooth.remote_scanners",
"assist_pipeline.pipelines",
"core.analytics",
"auth_module.totp",
]
"debugger": DEBUGGER_INTEGRATIONS,
}


async def async_setup_hass(
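The two SETUP_ORDER shapes in the hunk above drive the same pre-stage loop: one side is an ordered tuple of (name, domain-set) pairs, which also lets the frontend and recorder groups be merged into a single stage, while the other is a dict of name to domain-set iterated with .items(). A simplified illustration; the example domain sets are placeholders, not the real constants:

# Simplified illustration of the two SETUP_ORDER shapes; domain sets are placeholders.
LOGGING = {"logger"}
FRONTEND = {"http", "frontend"}
RECORDER = {"recorder"}
DEBUGGER = {"debugpy"}

order_as_tuple = (
    ("logging", LOGGING),
    ("frontend, recorder", {*FRONTEND, *RECORDER}),  # two groups merged into one stage
    ("debugger", DEBUGGER),
)
order_as_dict = {
    "logging": LOGGING,
    "frontend": FRONTEND,
    "recorder": RECORDER,
    "debugger": DEBUGGER,
}

for name, group in order_as_tuple:  # tuple form yields (name, set) pairs directly
    print("stage", name, sorted(group))
for name, group in order_as_dict.items():  # dict form needs .items()
    print("stage", name, sorted(group))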
@@ -366,16 +332,14 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
asyncio event loop. By primeing the cache of uname we can
avoid the blocking call in the event loop.
"""
_ = platform.uname().processor
platform.uname().processor # pylint: disable=expression-not-assigned

# Load the registries and cache the result of platform.uname().processor
translation.async_setup(hass)
entity.async_setup(hass)
template.async_setup(hass)
await asyncio.gather(
create_eager_task(get_internal_store_manager(hass).async_initialize()),
create_eager_task(area_registry.async_load(hass)),
create_eager_task(category_registry.async_load(hass)),
create_eager_task(device_registry.async_load(hass)),
create_eager_task(entity_registry.async_load(hass)),
create_eager_task(floor_registry.async_load(hass)),
@@ -385,7 +349,6 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
create_eager_task(template.async_load_custom_templates(hass)),
create_eager_task(restore_state.async_load(hass)),
create_eager_task(hass.config_entries.async_initialize()),
create_eager_task(async_get_system_info(hass)),
)


@@ -625,9 +588,7 @@ class _WatchPendingSetups:
"""Periodic log and dispatch of setups that are pending."""

def __init__(
self,
hass: core.HomeAssistant,
setup_started: dict[tuple[str, str | None], float],
self, hass: core.HomeAssistant, setup_started: dict[str, float]
) -> None:
"""Initialize the WatchPendingSetups class."""
self._hass = hass
@@ -642,11 +603,10 @@ class _WatchPendingSetups:
now = monotonic()
self._duration_count += SLOW_STARTUP_CHECK_INTERVAL

remaining_with_setup_started: defaultdict[str, float] = defaultdict(float)
for integration_group, start_time in self._setup_started.items():
domain, _ = integration_group
remaining_with_setup_started[domain] += now - start_time

remaining_with_setup_started = {
domain: (now - start_time)
for domain, start_time in self._setup_started.items()
}
if remaining_with_setup_started:
_LOGGER.debug("Integration remaining: %s", remaining_with_setup_started)
elif waiting_tasks := self._hass._active_tasks: # pylint: disable=protected-access
@@ -660,7 +620,7 @@ class _WatchPendingSetups:
# once we take over LOG_SLOW_STARTUP_INTERVAL (60s) to start up
_LOGGER.warning(
"Waiting on integrations to complete setup: %s",
self._setup_started,
", ".join(self._setup_started),
)

_LOGGER.debug("Running timeout Zones: %s", self._hass.timeout.zones)
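One side of this hunk keys setup_started by (domain, group) tuples and folds the elapsed times into a per-domain total with a defaultdict; the other keys by bare domain and uses a dict comprehension. A standalone illustration of the tuple-keyed aggregation; the timestamps are made up:

# Standalone illustration of the tuple-keyed aggregation; timestamps are made up.
from collections import defaultdict

setup_started = {
    ("light", None): 100.0,
    ("light", "hue"): 102.5,   # same domain, different group
    ("sensor", None): 101.0,
}
now = 110.0  # stand-in for time.monotonic()

remaining_with_setup_started: defaultdict[str, float] = defaultdict(float)
for (domain, _group), start_time in setup_started.items():
    remaining_with_setup_started[domain] += now - start_time

print(dict(remaining_with_setup_started))  # {'light': 17.5, 'sensor': 9.0}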
@@ -700,18 +660,13 @@ async def async_setup_multi_components(
"""Set up multiple domains. Log on failure."""
# Avoid creating tasks for domains that were setup in a previous stage
domains_not_yet_setup = domains - hass.config.components
# Create setup tasks for base platforms first since everything will have
# to wait to be imported, and the sooner we can get the base platforms
# loaded the sooner we can start loading the rest of the integrations.
futures = {
domain: hass.async_create_task(
async_setup_component(hass, domain, config),
f"setup component {domain}",
eager_start=True,
)
for domain in sorted(
domains_not_yet_setup, key=SETUP_ORDER_SORT_KEY, reverse=True
)
for domain in domains_not_yet_setup
}
results = await asyncio.gather(*futures.values(), return_exceptions=True)
for idx, domain in enumerate(futures):
@@ -728,53 +683,29 @@ async def _async_resolve_domains_to_setup(
hass: core.HomeAssistant, config: dict[str, Any]
) -> tuple[set[str], dict[str, loader.Integration]]:
"""Resolve all dependencies and return list of domains to set up."""
base_platforms_loaded = False
domains_to_setup = _get_domains(hass, config)
needed_requirements: set[str] = set()
platform_integrations = conf_util.extract_platform_integrations(
config, BASE_PLATFORMS
)
# Ensure base platforms that have platform integrations are added to
# to `domains_to_setup so they can be setup first instead of
# discovering them when later when a config entry setup task
# notices its needed and there is already a long line to use
# the import executor.
#
# For example if we have
# sensor:
#   - platform: template
#
# `template` has to be loaded to validate the config for sensor
# so we want to start loading `sensor` as soon as we know
# it will be needed. The more platforms under `sensor:`, the longer
# it will take to finish setup for `sensor` because each of these
# platforms has to be imported before we can validate the config.
#
# Thankfully we are migrating away from the platform pattern
# so this will be less of a problem in the future.
domains_to_setup.update(platform_integrations)

# Load manifests for base platforms and platform based integrations
# that are defined under base platforms right away since we do not require
# the manifest to list them as dependencies and we want to avoid the lock
# contention when multiple integrations try to load them at once
additional_manifests_to_load = {
*BASE_PLATFORMS,
*chain.from_iterable(platform_integrations.values()),
}

translations_to_load = additional_manifests_to_load.copy()

# Resolve all dependencies so we know all integrations
# that will have to be loaded and start right-away
# that will have to be loaded and start rightaway
integration_cache: dict[str, loader.Integration] = {}
to_resolve: set[str] = domains_to_setup
while to_resolve or additional_manifests_to_load:
while to_resolve:
old_to_resolve: set[str] = to_resolve
to_resolve = set()

if additional_manifests_to_load:
to_get = {*old_to_resolve, *additional_manifests_to_load}
additional_manifests_to_load.clear()
if not base_platforms_loaded:
# Load base platforms right away since
# we do not require the manifest to list
# them as dependencies and we want
# to avoid the lock contention when multiple
# integrations try to resolve them at once
base_platforms_loaded = True
to_get = {*old_to_resolve, *BASE_PLATFORMS, *platform_integrations}
else:
to_get = old_to_resolve

@@ -787,17 +718,6 @@ async def _async_resolve_domains_to_setup(
continue
integration_cache[domain] = itg
needed_requirements.update(itg.requirements)

# Make sure manifests for dependencies are loaded in the next
# loop to try to group as many as manifest loads in a single
# call to avoid the creating one-off executor jobs later in
# the setup process
additional_manifests_to_load.update(
dep
for dep in chain(itg.dependencies, itg.after_dependencies)
if dep not in integration_cache
)

if domain not in old_to_resolve:
continue

@@ -846,12 +766,6 @@ async def _async_resolve_domains_to_setup(
"check installed requirements",
eager_start=True,
)

#
# Only add the domains_to_setup after we finish resolving
# as new domains are likely to added in the process
#
translations_to_load.update(domains_to_setup)
# Start loading translations for all integrations we are going to set up
# in the background so they are ready when we need them. This avoids a
# lot of waiting for the translation load lock and a thundering herd of
@@ -863,19 +777,10 @@ async def _async_resolve_domains_to_setup(
# wait for the translation load lock, loading will be done by the
# time it gets to it.
hass.async_create_background_task(
translation.async_load_integrations(hass, translations_to_load),
"load translations",
eager_start=True,
)

# Preload storage for all integrations we are going to set up
# so we do not have to wait for it to be loaded when we need it
# in the setup process.
hass.async_create_background_task(
get_internal_store_manager(hass).async_preload(
[*PRELOAD_STORAGE, *domains_to_setup]
translation.async_load_integrations(
hass, {*BASE_PLATFORMS, *platform_integrations, *domains_to_setup}
),
"preload storage",
"load translations",
eager_start=True,
)

@@ -886,8 +791,10 @@ async def _async_set_up_integrations(
hass: core.HomeAssistant, config: dict[str, Any]
) -> None:
"""Set up all the integrations."""
setup_started: dict[tuple[str, str | None], float] = {}
setup_started: dict[str, float] = {}
hass.data[DATA_SETUP_STARTED] = setup_started
setup_time: dict[str, timedelta] = hass.data.setdefault(DATA_SETUP_TIME, {})

watcher = _WatchPendingSetups(hass, setup_started)
watcher.async_start()

@@ -899,9 +806,10 @@ async def _async_set_up_integrations(
if "recorder" in domains_to_setup:
recorder.async_initialize_recorder(hass)

pre_stage_domains = [
(name, domains_to_setup & domain_group) for name, domain_group in SETUP_ORDER
]
pre_stage_domains: dict[str, set[str]] = {
name: domains_to_setup & domain_group
for name, domain_group in SETUP_ORDER.items()
}

# calculate what components to setup in what stage
stage_1_domains: set[str] = set()
@@ -927,18 +835,10 @@ async def _async_set_up_integrations(

stage_2_domains = domains_to_setup - stage_1_domains

for name, domain_group in pre_stage_domains:
for name, domain_group in pre_stage_domains.items():
if domain_group:
stage_2_domains -= domain_group
_LOGGER.info("Setting up %s: %s", name, domain_group)
to_be_loaded = domain_group.copy()
to_be_loaded.update(
dep
for domain in domain_group
if (integration := integration_cache.get(domain)) is not None
for dep in integration.all_dependencies
)
async_set_domains_to_be_loaded(hass, to_be_loaded)
await async_setup_multi_components(hass, domain_group, config)

# Enables after dependencies when setting up stage 1 domains
@@ -987,9 +887,7 @@ async def _async_set_up_integrations(

watcher.async_stop()

if _LOGGER.isEnabledFor(logging.DEBUG):
setup_time = async_get_setup_timings(hass)
_LOGGER.debug(
"Integration setup times: %s",
dict(sorted(setup_time.items(), key=itemgetter(1), reverse=True)),
)
_LOGGER.debug(
"Integration setup times: %s",
dict(sorted(setup_time.items(), key=itemgetter(1))),
)
@@ -1,5 +0,0 @@
{
"domain": "motionblinds",
"name": "Motionblinds",
"integrations": ["motion_blinds", "motionblinds_ble"]
}

@@ -5,10 +5,5 @@
"default": "mdi:robot"
}
}
},
"services": {
"capture_image": "mdi:camera",
"change_setting": "mdi:cog",
"trigger_automation": "mdi:play"
}
}

@@ -52,7 +52,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

# Remove ozone sensors from registry if they exist
ent_reg = er.async_get(hass)
for day in range(5):
for day in range(0, 5):
unique_id = f"{coordinator.location_key}-ozone-{day}"
if entity_id := ent_reg.async_get_entity_id(SENSOR_PLATFORM, DOMAIN, unique_id):
_LOGGER.debug("Removing ozone sensor entity %s", entity_id)
@@ -135,4 +135,4 @@ class AccuWeatherDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
) as error:
raise UpdateFailed(error) from error
_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
return {**current, ATTR_FORECAST: forecast}
return {**current, **{ATTR_FORECAST: forecast}}

@@ -146,9 +146,9 @@ class AccuWeatherEntity(
"""Return the UV index."""
return cast(float, self.coordinator.data["UVIndex"])

@callback
def _async_forecast_daily(self) -> list[Forecast] | None:
"""Return the daily forecast in native units."""
@property
def forecast(self) -> list[Forecast] | None:
"""Return the forecast array."""
if not self.coordinator.forecast:
return None
# remap keys from library to keys understood by the weather component
@@ -177,3 +177,8 @@ class AccuWeatherEntity(
}
for item in self.coordinator.data[ATTR_FORECAST]
]

@callback
def _async_forecast_daily(self) -> list[Forecast] | None:
"""Return the daily forecast in native units."""
return self.forecast
@@ -9,7 +9,7 @@ from typing import Final

LEASES_REGEX: Final[re.Pattern[str]] = re.compile(
r"(?P<ip>([0-9]{1,3}[\.]){3}[0-9]{1,3})"
r"\smac:\s(?P<mac>([0-9a-f]{2}[:-]){5}([0-9a-f]{2}))"
r"\svalid\sfor:\s(?P<timevalid>(-?\d+))"
r"\ssec"
+ r"\smac:\s(?P<mac>([0-9a-f]{2}[:-]){5}([0-9a-f]{2}))"
+ r"\svalid\sfor:\s(?P<timevalid>(-?\d+))"
+ r"\ssec"
)

@@ -44,7 +44,7 @@ class AdGuardHomeEntity(Entity):

async def _adguard_update(self) -> None:
"""Update AdGuard Home entity."""
raise NotImplementedError
raise NotImplementedError()

@property
def device_info(self) -> DeviceInfo:

@@ -1,5 +0,0 @@
{
"services": {
"write_data_by_name": "mdi:pencil"
}
}

@@ -58,7 +58,7 @@ HVAC_MODES = [
HVACMode.FAN_ONLY,
HVACMode.DRY,
]
HVAC_MODES_MYAUTO = [*HVAC_MODES, HVACMode.HEAT_COOL]
HVAC_MODES_MYAUTO = HVAC_MODES + [HVACMode.HEAT_COOL]
SUPPORTED_FEATURES = (
ClimateEntityFeature.FAN_MODE
| ClimateEntityFeature.TURN_OFF

@@ -45,7 +45,7 @@ class AdvantageAirConfigFlow(ConfigFlow, domain=DOMAIN):
port=port,
session=async_get_clientsession(self.hass),
retry=ADVANTAGE_AIR_RETRY,
).async_get()
).async_get(1)
except ApiError:
errors["base"] = "cannot_connect"
else:

@@ -1,5 +0,0 @@
{
"services": {
"set_time_to": "mdi:timer-cog"
}
}
@@ -121,3 +121,8 @@ FORECAST_MAP = {
AOD_WIND_SPEED: ATTR_FORECAST_NATIVE_WIND_SPEED,
},
}

WEATHER_FORECAST_MODES = {
AOD_FORECAST_DAILY: "daily",
AOD_FORECAST_HOURLY: "hourly",
}

@@ -14,6 +14,7 @@ from aemet_opendata.const import (
)

from homeassistant.components.weather import (
DOMAIN as WEATHER_DOMAIN,
Forecast,
SingleCoordinatorWeatherEntity,
WeatherEntityFeature,
@@ -26,6 +27,7 @@ from homeassistant.const import (
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import (
@@ -34,6 +36,7 @@ from .const import (
DOMAIN,
ENTRY_NAME,
ENTRY_WEATHER_COORDINATOR,
WEATHER_FORECAST_MODES,
)
from .coordinator import WeatherUpdateCoordinator
from .entity import AemetEntity
@@ -48,14 +51,31 @@ async def async_setup_entry(
domain_data = hass.data[DOMAIN][config_entry.entry_id]
weather_coordinator = domain_data[ENTRY_WEATHER_COORDINATOR]

async_add_entities(
[
entities = []
entity_registry = er.async_get(hass)

# Add daily + hourly entity for legacy config entries, only add daily for new
# config entries. This can be removed in HA Core 2024.3
if entity_registry.async_get_entity_id(
WEATHER_DOMAIN,
DOMAIN,
f"{config_entry.unique_id} {WEATHER_FORECAST_MODES[AOD_FORECAST_HOURLY]}",
):
for mode, mode_id in WEATHER_FORECAST_MODES.items():
name = f"{domain_data[ENTRY_NAME]} {mode_id}"
unique_id = f"{config_entry.unique_id} {mode_id}"
entities.append(AemetWeather(name, unique_id, weather_coordinator, mode))
else:
entities.append(
AemetWeather(
domain_data[ENTRY_NAME], config_entry.unique_id, weather_coordinator
domain_data[ENTRY_NAME],
config_entry.unique_id,
weather_coordinator,
AOD_FORECAST_DAILY,
)
],
False,
)
)

async_add_entities(entities, False)


class AemetWeather(
@@ -78,9 +98,14 @@ class AemetWeather(
name,
unique_id,
coordinator: WeatherUpdateCoordinator,
forecast_mode,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator)
self._forecast_mode = forecast_mode
self._attr_entity_registry_enabled_default = (
self._forecast_mode == AOD_FORECAST_DAILY
)
self._attr_name = name
self._attr_unique_id = unique_id

@@ -90,6 +115,11 @@ class AemetWeather(
cond = self.get_aemet_value([AOD_WEATHER, AOD_CONDITION])
return CONDITIONS_MAP.get(cond)

@property
def forecast(self) -> list[Forecast]:
"""Return the forecast array."""
return self.get_aemet_forecast(self._forecast_mode)

@callback
def _async_forecast_daily(self) -> list[Forecast]:
"""Return the daily forecast in native units."""
@@ -1,9 +0,0 @@
{
"services": {
"start_recording": "mdi:record-rec",
"stop_recording": "mdi:stop",
"enable_alerts": "mdi:bell-alert",
"disable_alerts": "mdi:bell-off",
"snapshot": "mdi:camera"
}
}

@@ -17,8 +17,6 @@ from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType, StateType

from . import group as group_pre_import # noqa: F401

_LOGGER: Final = logging.getLogger(__name__)

ATTR_AQI: Final = "air_quality_index"
@@ -83,7 +81,7 @@ class AirQualityEntity(Entity):
@property
def particulate_matter_2_5(self) -> StateType:
"""Return the particulate matter 2.5 level."""
raise NotImplementedError
raise NotImplementedError()

@property
def particulate_matter_10(self) -> StateType:

@@ -1,16 +1,12 @@
"""Describe group states."""

from typing import TYPE_CHECKING

from homeassistant.components.group import GroupIntegrationRegistry
from homeassistant.core import HomeAssistant, callback

if TYPE_CHECKING:
from homeassistant.components.group import GroupIntegrationRegistry


@callback
def async_describe_on_off_states(
hass: HomeAssistant, registry: "GroupIntegrationRegistry"
hass: HomeAssistant, registry: GroupIntegrationRegistry
) -> None:
"""Describe group on off states."""
registry.exclude_domain()
@@ -108,7 +108,6 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
),
"illuminance": SensorEntityDescription(
key="illuminance",
translation_key="illuminance",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),

@@ -33,9 +33,6 @@
},
"radon_longterm_level": {
"name": "Radon longterm level"
},
"illuminance": {
"name": "[%key:component::sensor::entity_component::illuminance::name%]"
}
}
}

@@ -27,7 +27,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
try:
await client.connect_and_stay_connected()
except TimeoutError as t:
raise ConfigEntryNotReady from t
raise ConfigEntryNotReady() from t

# Store an API object for your platforms to access
hass.data[DOMAIN][entry.entry_id] = client

@@ -162,13 +162,13 @@ def _standardize_geography_config_entry(
# about, infer it from the data we have:
entry_updates["data"] = {**entry.data}
if CONF_CITY in entry.data:
entry_updates["data"][CONF_INTEGRATION_TYPE] = (
INTEGRATION_TYPE_GEOGRAPHY_NAME
)
entry_updates["data"][
CONF_INTEGRATION_TYPE
] = INTEGRATION_TYPE_GEOGRAPHY_NAME
else:
entry_updates["data"][CONF_INTEGRATION_TYPE] = (
INTEGRATION_TYPE_GEOGRAPHY_COORDS
)
entry_updates["data"][
CONF_INTEGRATION_TYPE
] = INTEGRATION_TYPE_GEOGRAPHY_COORDS

if not entry_updates:
return
@@ -54,7 +54,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
try:
await node.async_connect()
except NodeProError as err:
raise ConfigEntryNotReady from err
raise ConfigEntryNotReady() from err

reload_task: asyncio.Task | None = None

@@ -44,7 +44,7 @@ class AirzoneEntity(CoordinatorEntity[AirzoneUpdateCoordinator]):

def get_airzone_value(self, key: str) -> Any:
"""Return Airzone entity value by key."""
raise NotImplementedError
raise NotImplementedError()


class AirzoneSystemEntity(AirzoneEntity):

@@ -11,7 +11,6 @@ from aioairzone_cloud.const import (
API_PARAMS,
API_POWER,
API_SETPOINT,
API_SPEED_CONF,
API_UNITS,
API_VALUE,
AZD_ACTION,
@@ -25,8 +24,6 @@ from aioairzone_cloud.const import (
AZD_NUM_DEVICES,
AZD_NUM_GROUPS,
AZD_POWER,
AZD_SPEED,
AZD_SPEEDS,
AZD_TEMP,
AZD_TEMP_SET,
AZD_TEMP_SET_MAX,
@@ -37,10 +34,6 @@ from aioairzone_cloud.const import (

from homeassistant.components.climate import (
ATTR_HVAC_MODE,
FAN_AUTO,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
ClimateEntity,
ClimateEntityFeature,
HVACAction,
@@ -62,22 +55,6 @@ from .entity import (
AirzoneZoneEntity,
)

FAN_SPEED_AUTO: dict[int, str] = {
0: FAN_AUTO,
}

FAN_SPEED_MAPS: Final[dict[int, dict[int, str]]] = {
2: {
1: FAN_LOW,
2: FAN_HIGH,
},
3: {
1: FAN_LOW,
2: FAN_MEDIUM,
3: FAN_HIGH,
},
}

HVAC_ACTION_LIB_TO_HASS: Final[dict[OperationAction, HVACAction]] = {
OperationAction.COOLING: HVACAction.COOLING,
OperationAction.DRYING: HVACAction.DRYING,
@@ -298,9 +275,6 @@ class AirzoneDeviceGroupClimate(AirzoneClimate):
class AirzoneAidooClimate(AirzoneAidooEntity, AirzoneDeviceClimate):
"""Define an Airzone Cloud Aidoo climate."""

_speeds: dict[int, str]
_speeds_reverse: dict[str, int]

def __init__(
self,
coordinator: AirzoneUpdateCoordinator,
@@ -317,52 +291,9 @@ class AirzoneAidooClimate(AirzoneAidooEntity, AirzoneDeviceClimate):
]
if HVACMode.OFF not in self._attr_hvac_modes:
self._attr_hvac_modes += [HVACMode.OFF]
if (
self.get_airzone_value(AZD_SPEED) is not None
and self.get_airzone_value(AZD_SPEEDS) is not None
):
self._initialize_fan_speeds()

self._async_update_attrs()

def _initialize_fan_speeds(self) -> None:
"""Initialize Aidoo fan speeds."""
azd_speeds: dict[int, int] = self.get_airzone_value(AZD_SPEEDS)
max_speed = max(azd_speeds)

fan_speeds: dict[int, str]
if speeds_map := FAN_SPEED_MAPS.get(max_speed):
fan_speeds = speeds_map
else:
fan_speeds = {}

for speed in azd_speeds:
if speed != 0:
fan_speeds[speed] = f"{int(round((speed * 100) / max_speed, 0))}%"

if 0 in azd_speeds:
fan_speeds = FAN_SPEED_AUTO | fan_speeds

self._speeds = {}
for key, value in fan_speeds.items():
_key = azd_speeds.get(key)
if _key is not None:
self._speeds[_key] = value

self._speeds_reverse = {v: k for k, v in self._speeds.items()}
self._attr_fan_modes = list(self._speeds_reverse)

self._attr_supported_features |= ClimateEntityFeature.FAN_MODE

async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set Aidoo fan mode."""
params: dict[str, Any] = {
API_SPEED_CONF: {
API_VALUE: self._speeds_reverse.get(fan_mode),
}
}
await self._async_update_params(params)

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set hvac mode."""
params: dict[str, Any] = {}
@@ -380,14 +311,6 @@ class AirzoneAidooClimate(AirzoneAidooEntity, AirzoneDeviceClimate):
}
await self._async_update_params(params)

@callback
def _async_update_attrs(self) -> None:
"""Update Aidoo climate attributes."""
super()._async_update_attrs()

if self.supported_features & ClimateEntityFeature.FAN_MODE:
self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED))


class AirzoneGroupClimate(AirzoneGroupEntity, AirzoneDeviceGroupClimate):
"""Define an Airzone Cloud Group climate."""
@@ -33,7 +33,6 @@ from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType

from . import group as group_pre_import # noqa: F401
from .const import ( # noqa: F401
_DEPRECATED_FORMAT_NUMBER,
_DEPRECATED_FORMAT_TEXT,
@@ -173,7 +172,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A

def alarm_disarm(self, code: str | None = None) -> None:
"""Send disarm command."""
raise NotImplementedError
raise NotImplementedError()

async def async_alarm_disarm(self, code: str | None = None) -> None:
"""Send disarm command."""
@@ -181,7 +180,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A

def alarm_arm_home(self, code: str | None = None) -> None:
"""Send arm home command."""
raise NotImplementedError
raise NotImplementedError()

async def async_alarm_arm_home(self, code: str | None = None) -> None:
"""Send arm home command."""
@@ -189,7 +188,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A

def alarm_arm_away(self, code: str | None = None) -> None:
"""Send arm away command."""
raise NotImplementedError
raise NotImplementedError()

async def async_alarm_arm_away(self, code: str | None = None) -> None:
"""Send arm away command."""
@@ -197,7 +196,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A

def alarm_arm_night(self, code: str | None = None) -> None:
"""Send arm night command."""
raise NotImplementedError
raise NotImplementedError()

async def async_alarm_arm_night(self, code: str | None = None) -> None:
"""Send arm night command."""
@@ -205,7 +204,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A

def alarm_arm_vacation(self, code: str | None = None) -> None:
"""Send arm vacation command."""
raise NotImplementedError
raise NotImplementedError()

async def async_alarm_arm_vacation(self, code: str | None = None) -> None:
"""Send arm vacation command."""
@@ -213,7 +212,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A

def alarm_trigger(self, code: str | None = None) -> None:
"""Send alarm trigger command."""
raise NotImplementedError
raise NotImplementedError()

async def async_alarm_trigger(self, code: str | None = None) -> None:
"""Send alarm trigger command."""
@@ -221,7 +220,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A

def alarm_arm_custom_bypass(self, code: str | None = None) -> None:
"""Send arm custom bypass command."""
raise NotImplementedError
raise NotImplementedError()

async def async_alarm_arm_custom_bypass(self, code: str | None = None) -> None:
"""Send arm custom bypass command."""
@@ -1,7 +1,6 @@
"""Describe group states."""

from typing import TYPE_CHECKING

from homeassistant.components.group import GroupIntegrationRegistry
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
@@ -13,13 +12,10 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant, callback

if TYPE_CHECKING:
from homeassistant.components.group import GroupIntegrationRegistry


@callback
def async_describe_on_off_states(
hass: HomeAssistant, registry: "GroupIntegrationRegistry"
hass: HomeAssistant, registry: GroupIntegrationRegistry
) -> None:
"""Describe group on off states."""
registry.on_off_states(

@@ -18,9 +18,9 @@
"alarm_arm_away": "mdi:shield-lock",
"alarm_arm_home": "mdi:shield-home",
"alarm_arm_night": "mdi:shield-moon",
"alarm_arm_custom_bypass": "mdi:security",
"alarm_custom_bypass": "mdi:security",
"alarm_disarm": "mdi:shield-off",
"alarm_trigger": "mdi:bell-ring",
"alarm_arm_vacation": "mdi:shield-airplane"
"arlam_arm_vacation": "mdi:shield-airplane"
}
}

@@ -1,7 +0,0 @@
{
"services": {
"toggle": "mdi:bell-ring",
"turn_off": "mdi:bell-off",
"turn_on": "mdi:bell-alert"
}
}
@@ -1584,7 +1584,7 @@ class AlexaModeController(AlexaCapability):
)
modes += 1

# Alexa requires at least 2 modes
# Alexa requiers at least 2 modes
if modes == 1:
self._resource.add_mode(f"state.{PRESET_MODE_NA}", [PRESET_MODE_NA])

@@ -50,6 +50,7 @@ async def async_setup_intents(hass: HomeAssistant) -> None:
Right now this module does not expose any, but the intent component breaks
without it.
"""
pass # pylint: disable=unnecessary-pass


class UnknownRequest(HomeAssistantError):

@@ -224,7 +224,7 @@ class AlexaCapabilityResource:

Return ModeResources, PresetResources friendlyNames serialized.
"""
raise NotImplementedError
raise NotImplementedError()

def serialize_labels(self, resources: list[str]) -> dict[str, list[dict[str, Any]]]:
"""Return serialized labels for an API response.

@@ -98,16 +98,16 @@ async def async_setup_entry(
_LOGGER.error("No devices found")
return

tasks = [
asyncio.create_task(heater.update_device_info())
for heater in data_connection.get_devices()
]
tasks = []
for heater in data_connection.get_devices():
tasks.append(asyncio.create_task(heater.update_device_info()))
await asyncio.wait(tasks)

async_add_entities(
(AmbiclimateEntity(heater, store) for heater in data_connection.get_devices()),
True,
)
devs = []
for heater in data_connection.get_devices():
devs.append(AmbiclimateEntity(heater, store))

async_add_entities(devs, True)

async def send_comfort_feedback(service: ServiceCall) -> None:
"""Send comfort feedback."""

@@ -211,10 +211,9 @@ class AmcrestChecker(ApiWrapper):
self, *args: Any, **kwargs: Any
) -> AsyncIterator[httpx.Response]:
"""amcrest.ApiWrapper.command wrapper to catch errors."""
async with (
self._async_command_wrapper(),
super().async_stream_command(*args, **kwargs) as ret,
):
async with self._async_command_wrapper(), super().async_stream_command(
*args, **kwargs
) as ret:
yield ret

@asynccontextmanager
@@ -13,11 +13,15 @@ from amcrest import AmcrestError
from haffmpeg.camera import CameraMjpeg
import voluptuous as vol

from homeassistant.components.camera import Camera, CameraEntityFeature
from homeassistant.components.camera import (
DOMAIN as CAMERA_DOMAIN,
Camera,
CameraEntityFeature,
)
from homeassistant.components.ffmpeg import FFmpegManager, get_ffmpeg_manager
from homeassistant.const import ATTR_ENTITY_ID, CONF_NAME, STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.aiohttp_client import (
async_aiohttp_proxy_stream,
async_aiohttp_proxy_web,
@@ -33,6 +37,7 @@ from .const import (
COMM_TIMEOUT,
DATA_AMCREST,
DEVICES,
DOMAIN,
RESOLUTION_TO_STREAM,
SERVICE_UPDATE,
SNAPSHOT_TIMEOUT,
@@ -136,6 +141,18 @@ async def async_setup_platform(
device = hass.data[DATA_AMCREST][DEVICES][name]
entity = AmcrestCam(name, device, get_ffmpeg_manager(hass))

# 2021.9.0 introduced unique id's for the camera entity, but these were not
# unique for different resolution streams. If any cameras were configured
# with this version, update the old entity with the new unique id.
serial_number = await device.api.async_serial_number
serial_number = serial_number.strip()
registry = er.async_get(hass)
entity_id = registry.async_get_entity_id(CAMERA_DOMAIN, DOMAIN, serial_number)
if entity_id is not None:
_LOGGER.debug("Updating unique id for camera %s", entity_id)
new_unique_id = f"{serial_number}-{device.resolution}-{device.channel}"
registry.async_update_entity(entity_id, new_unique_id=new_unique_id)

async_add_entities([entity], True)


@@ -1,15 +0,0 @@
{
"services": {
"enable_recording": "mdi:record-rec",
"disable_recording": "mdi:stop",
"enable_audio": "mdi:volume-high",
"disable_audio": "mdi:volume-off",
"enable_motion_recording": "mdi:motion-sensor",
"disable_motion_recording": "mdi:motion-sensor-off",
"goto_preset": "mdi:pan",
"set_color_bw": "mdi:palette",
"start_tour": "mdi:panorama",
"stop_tour": "mdi:panorama-outline",
"ptz_control": "mdi:pan"
}
}

@@ -108,21 +108,21 @@ class AmcrestSensor(SensorEntity):
elif sensor_type == SENSOR_SDCARD:
storage = await self._api.async_storage_all
try:
self._attr_extra_state_attributes["Total"] = (
f"{storage['total'][0]:.2f} {storage['total'][1]}"
)
self._attr_extra_state_attributes[
"Total"
] = f"{storage['total'][0]:.2f} {storage['total'][1]}"
except ValueError:
self._attr_extra_state_attributes["Total"] = (
f"{storage['total'][0]} {storage['total'][1]}"
)
self._attr_extra_state_attributes[
"Total"
] = f"{storage['total'][0]} {storage['total'][1]}"
try:
self._attr_extra_state_attributes["Used"] = (
f"{storage['used'][0]:.2f} {storage['used'][1]}"
)
self._attr_extra_state_attributes[
"Used"
] = f"{storage['used'][0]:.2f} {storage['used'][1]}"
except ValueError:
self._attr_extra_state_attributes["Used"] = (
f"{storage['used'][0]} {storage['used'][1]}"
)
self._attr_extra_state_attributes[
"Used"
] = f"{storage['used'][0]} {storage['used'][1]}"
try:
self._attr_native_value = f"{storage['used_percent']:.2f}"
except ValueError:
@@ -3,59 +3,46 @@ enable_recording:
entity_id:
example: "camera.house_front"
selector:
entity:
integration: amcrest
domain: camera
text:

disable_recording:
fields:
entity_id:
example: "camera.house_front"
selector:
entity:
integration: amcrest
domain: camera
text:

enable_audio:
fields:
entity_id:
example: "camera.house_front"
selector:
entity:
integration: amcrest
domain: camera
text:

disable_audio:
fields:
entity_id:
example: "camera.house_front"
selector:
entity:
integration: amcrest
domain: camera
text:

enable_motion_recording:
fields:
entity_id:
example: "camera.house_front"
selector:
entity:
integration: amcrest
domain: camera
text:

disable_motion_recording:
fields:
entity_id:
example: "camera.house_front"
selector:
entity:
integration: amcrest
domain: camera
text:

goto_preset:
fields:
entity_id:
example: "camera.house_front"
selector:
entity:
integration: amcrest
@@ -72,9 +59,7 @@ set_color_bw:
entity_id:
example: "camera.house_front"
selector:
entity:
integration: amcrest
domain: camera
text:
color_bw:
selector:
select:
@@ -88,27 +73,21 @@ start_tour:
entity_id:
example: "camera.house_front"
selector:
entity:
integration: amcrest
domain: camera
text:

stop_tour:
fields:
entity_id:
example: "camera.house_front"
selector:
entity:
integration: amcrest
domain: camera
text:

ptz_control:
fields:
entity_id:
example: "camera.house_front"
selector:
entity:
integration: amcrest
domain: camera
text:
movement:
required: true
selector:
@@ -52,7 +52,7 @@ class IPWebcamBinarySensor(AndroidIPCamBaseEntity, BinarySensorEntity):

@property
def available(self) -> bool:
"""Return availability if setting is enabled."""
"""Return avaibility if setting is enabled."""
return MOTION_ACTIVE in self.cam.enabled_sensors and super().available

@property

@@ -132,7 +132,7 @@ async def async_setup_entry(
sensor
for sensor in SENSOR_TYPES
if sensor.key
in [*coordinator.cam.enabled_sensors, "audio_connections", "video_connections"]
in coordinator.cam.enabled_sensors + ["audio_connections", "video_connections"]
]
async_add_entities(
IPWebcamSensor(coordinator, description) for description in sensor_types

@@ -235,7 +235,7 @@ class OptionsFlowHandler(OptionsFlowWithConfigEntry):
apps = [SelectOptionDict(value=APPS_NEW_ID, label="Add new")] + [
SelectOptionDict(value=k, label=v) for k, v in apps_list.items()
]
rules = [RULES_NEW_ID, *self._state_det_rules]
rules = [RULES_NEW_ID] + list(self._state_det_rules)
options = self.options

data_schema = vol.Schema(

@@ -56,7 +56,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
data={
**entry.data,
CONF_DEVICES: serialize_device_list(devices),
**{CONF_DEVICES: serialize_device_list(devices)},
},
)
coordinators = [AnovaCoordinator(hass, device) for device in devices]

@@ -1,23 +0,0 @@
"""Diagnostics support for APCUPSD."""

from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

from .const import DOMAIN
from .coordinator import APCUPSdCoordinator, APCUPSdData

TO_REDACT = {"SERIALNO", "HOSTNAME"}


async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator: APCUPSdCoordinator = hass.data[DOMAIN][entry.entry_id]
data: APCUPSdData = coordinator.data
return async_redact_data(data, TO_REDACT)

@@ -137,7 +137,7 @@ class APIEventStream(HomeAssistantView):

restrict: list[str] | None = None
if restrict_str := request.query.get("restrict"):
restrict = [*restrict_str.split(","), EVENT_HOMEASSISTANT_STOP]
restrict = restrict_str.split(",") + [EVENT_HOMEASSISTANT_STOP]

async def forward_events(event: Event) -> None:
"""Forward events to the open request."""
@@ -413,7 +413,7 @@ class APIDomainServicesView(HomeAssistantView):
)
)
except (vol.Invalid, ServiceNotFound) as ex:
raise HTTPBadRequest from ex
raise HTTPBadRequest() from ex
finally:
cancel_listen()

@@ -54,25 +54,6 @@ SIGNAL_DISCONNECTED = "apple_tv_disconnected"

PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE]

AUTH_EXCEPTIONS = (
exceptions.AuthenticationError,
exceptions.InvalidCredentialsError,
exceptions.NoCredentialsError,
)
CONNECTION_TIMEOUT_EXCEPTIONS = (
asyncio.CancelledError,
TimeoutError,
exceptions.ConnectionLostError,
exceptions.ConnectionFailedError,
)
DEVICE_EXCEPTIONS = (
exceptions.ProtocolError,
exceptions.NoServiceError,
exceptions.PairingError,
exceptions.BackOffError,
exceptions.DeviceIdMissingError,
)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry for Apple TV."""
@@ -83,13 +64,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

try:
await manager.async_first_connect()
except AUTH_EXCEPTIONS as ex:
except (
exceptions.AuthenticationError,
exceptions.InvalidCredentialsError,
exceptions.NoCredentialsError,
) as ex:
raise ConfigEntryAuthFailed(
f"{address}: Authentication failed, try reconfiguring device: {ex}"
) from ex
except CONNECTION_TIMEOUT_EXCEPTIONS as ex:
except (
asyncio.CancelledError,
exceptions.ConnectionLostError,
exceptions.ConnectionFailedError,
) as ex:
raise ConfigEntryNotReady(f"{address}: {ex}") from ex
except DEVICE_EXCEPTIONS as ex:
except (
exceptions.ProtocolError,
exceptions.NoServiceError,
exceptions.PairingError,
exceptions.BackOffError,
exceptions.DeviceIdMissingError,
) as ex:
_LOGGER.debug(
"Error setting up apple_tv at %s: %s", address, ex, exc_info=ex
)
@@ -102,9 +97,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
await manager.disconnect()

entry.async_on_unload(
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, on_hass_stop, run_immediately=True
)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop)
)

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

@@ -341,7 +341,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
self.hass, self.scan_filter, self.hass.loop
)
if not self.atv:
raise DeviceNotFound
raise DeviceNotFound()

# Protocols supported by the device are prospects for pairing
self.protocols_to_pair = deque(
@@ -384,7 +384,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
self.hass.config_entries.async_reload(entry.entry_id)
)
if not allow_exist:
raise DeviceAlreadyConfigured
raise DeviceAlreadyConfigured()

async def async_step_confirm(
self, user_input: dict[str, str] | None = None

@@ -153,9 +153,7 @@ class AppleTvMediaPlayer(
atv.audio.listener = self

if atv.features.in_state(FeatureState.Available, FeatureName.AppList):
self.manager.config_entry.async_create_task(
self.hass, self._update_app_list(), eager_start=True
)
self.hass.create_task(self._update_app_list())

async def _update_app_list(self) -> None:
_LOGGER.debug("Updating app list")
@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/apprise",
"iot_class": "cloud_push",
"loggers": ["apprise"],
"requirements": ["apprise==1.7.4"]
"requirements": ["apprise==1.7.2"]
}

@@ -51,7 +51,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

coordinator.stop_listen()

raise ConfigEntryNotReady
raise ConfigEntryNotReady()

await coordinator.wait_for_ready(ready_callback)

@@ -33,14 +33,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

address = entry.unique_id
assert address is not None
coordinator = hass.data.setdefault(DOMAIN, {})[entry.entry_id] = (
PassiveBluetoothProcessorCoordinator(
hass,
_LOGGER,
address=address,
mode=BluetoothScanningMode.PASSIVE,
update_method=_service_info_to_adv,
)
coordinator = hass.data.setdefault(DOMAIN, {})[
entry.entry_id
] = PassiveBluetoothProcessorCoordinator(
hass,
_LOGGER,
address=address,
mode=BluetoothScanningMode.PASSIVE,
update_method=_service_info_to_adv,
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(

@@ -23,8 +23,8 @@ _LOGGER = logging.getLogger(__name__)

_DEVICES_REGEX = re.compile(
r"(?P<name>([^\s]+)?)\s+"
r"(?P<ip>([0-9]{1,3}[\.]){3}[0-9]{1,3})\s+"
r"(?P<mac>([0-9a-f]{2}[:-]){5}([0-9a-f]{2}))\s+"
+ r"(?P<ip>([0-9]{1,3}[\.]){3}[0-9]{1,3})\s+"
+ r"(?P<mac>([0-9a-f]{2}[:-]){5}([0-9a-f]{2}))\s+"
)

PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend(

@@ -754,9 +754,9 @@ class PipelineRun:
raise DuplicateWakeUpDetectedError(result.wake_word_phrase)

# Record last wake up time to block duplicate detections
self.hass.data[DATA_LAST_WAKE_UP][result.wake_word_phrase] = (
time.monotonic()
)
self.hass.data[DATA_LAST_WAKE_UP][
result.wake_word_phrase
] = time.monotonic()

if result.queued_audio:
# Add audio that was pending at detection.
@@ -1375,9 +1375,9 @@ class PipelineInput:
raise DuplicateWakeUpDetectedError(self.wake_word_phrase)

# Record last wake up time to block duplicate detections
self.run.hass.data[DATA_LAST_WAKE_UP][self.wake_word_phrase] = (
time.monotonic()
)
self.run.hass.data[DATA_LAST_WAKE_UP][
self.wake_word_phrase
] = time.monotonic()

stt_input_stream = stt_processed_stream

@@ -101,9 +101,9 @@ class AsusWrtDevice(ScannerEntity):
self._device = self._router.devices[self._device.mac]
self._attr_extra_state_attributes = {}
if self._device.last_activity:
self._attr_extra_state_attributes[ATTR_LAST_TIME_REACHABLE] = (
self._device.last_activity.isoformat(timespec="seconds")
)
self._attr_extra_state_attributes[
ATTR_LAST_TIME_REACHABLE
] = self._device.last_activity.isoformat(timespec="seconds")
self.async_write_ha_state()

async def async_added_to_hass(self) -> None:

@@ -141,9 +141,9 @@ class AugustLock(AugustEntityMixin, RestoreEntity, LockEntity):
ATTR_BATTERY_LEVEL: self._detail.battery_level
}
if self._detail.keypad is not None:
self._attr_extra_state_attributes["keypad_battery_level"] = (
self._detail.keypad.battery_level
)
self._attr_extra_state_attributes[
"keypad_battery_level"
] = self._detail.keypad.battery_level

async def async_added_to_hass(self) -> None:
"""Restore ATTR_CHANGED_BY on startup since it is likely no longer in the activity log."""

@@ -66,9 +66,7 @@ class AugustSubscriberMixin:
self._unsub_interval()

self._stop_interval = self._hass.bus.async_listen(
EVENT_HOMEASSISTANT_STOP,
_async_cancel_update_interval,
run_immediately=True,
EVENT_HOMEASSISTANT_STOP, _async_cancel_update_interval
)

@callback

@@ -12,12 +12,14 @@

import logging

from aurorapy.client import AuroraError, AuroraSerialClient, AuroraTimeoutError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS, CONF_PORT, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN
from .coordinator import AuroraAbbDataUpdateCoordinator
from .const import DOMAIN, SCAN_INTERVAL

PLATFORMS = [Platform.SENSOR]

@@ -48,3 +50,60 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass.data[DOMAIN].pop(entry.entry_id)

return unload_ok


class AuroraAbbDataUpdateCoordinator(DataUpdateCoordinator[dict[str, float]]): # pylint: disable=hass-enforce-coordinator-module
"""Class to manage fetching AuroraAbbPowerone data."""

def __init__(self, hass: HomeAssistant, comport: str, address: int) -> None:
"""Initialize the data update coordinator."""
self.available_prev = False
self.available = False
self.client = AuroraSerialClient(address, comport, parity="N", timeout=1)
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL)

def _update_data(self) -> dict[str, float]:
"""Fetch new state data for the sensor.

This is the only function that should fetch new data for Home Assistant.
"""
data: dict[str, float] = {}
self.available_prev = self.available
try:
self.client.connect()

# read ADC channel 3 (grid power output)
power_watts = self.client.measure(3, True)
temperature_c = self.client.measure(21)
energy_wh = self.client.cumulated_energy(5)
[alarm, *_] = self.client.alarms()
except AuroraTimeoutError:
self.available = False
_LOGGER.debug("No response from inverter (could be dark)")
except AuroraError as error:
self.available = False
raise error
else:
data["instantaneouspower"] = round(power_watts, 1)
data["temp"] = round(temperature_c, 1)
data["totalenergy"] = round(energy_wh / 1000, 2)
data["alarm"] = alarm
self.available = True

finally:
if self.available != self.available_prev:
if self.available:
_LOGGER.info("Communication with %s back online", self.name)
else:
_LOGGER.warning(
"Communication with %s lost",
self.name,
)
if self.client.serline.isOpen():
self.client.close()

return data

async def _async_update_data(self) -> dict[str, float]:
"""Update inverter data in the executor."""
return await self.hass.async_add_executor_job(self._update_data)
@@ -1,94 +0,0 @@
"""DataUpdateCoordinator for the aurora_abb_powerone integration."""

import logging
from time import sleep

from aurorapy.client import AuroraError, AuroraSerialClient, AuroraTimeoutError
from serial import SerialException

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, SCAN_INTERVAL

_LOGGER = logging.getLogger(__name__)


class AuroraAbbDataUpdateCoordinator(DataUpdateCoordinator[dict[str, float]]): # pylint: disable=hass-enforce-coordinator-module
"""Class to manage fetching AuroraAbbPowerone data."""

def __init__(self, hass: HomeAssistant, comport: str, address: int) -> None:
"""Initialize the data update coordinator."""
self.available_prev = False
self.available = False
self.client = AuroraSerialClient(address, comport, parity="N", timeout=1)
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL)

def _update_data(self) -> dict[str, float]:
"""Fetch new state data for the sensors.

This is the only function that should fetch new data for Home Assistant.
"""
data: dict[str, float] = {}
self.available_prev = self.available
retries: int = 3
while retries > 0:
try:
self.client.connect()

# See command 59 in the protocol manual linked in __init__.py
grid_voltage = self.client.measure(1, True)
grid_current = self.client.measure(2, True)
power_watts = self.client.measure(3, True)
frequency = self.client.measure(4)
i_leak_dcdc = self.client.measure(6)
i_leak_inverter = self.client.measure(7)
temperature_c = self.client.measure(21)
r_iso = self.client.measure(30)
energy_wh = self.client.cumulated_energy(5)
[alarm, *_] = self.client.alarms()
except AuroraTimeoutError:
self.available = False
_LOGGER.debug("No response from inverter (could be dark)")
retries = 0
except (SerialException, AuroraError) as error:
self.available = False
retries -= 1
if retries <= 0:
raise UpdateFailed(error) from error
_LOGGER.debug(
"Exception: %s occurred, %d retries remaining",
repr(error),
retries,
)
sleep(1)
else:
data["grid_voltage"] = round(grid_voltage, 1)
data["grid_current"] = round(grid_current, 1)
data["instantaneouspower"] = round(power_watts, 1)
data["grid_frequency"] = round(frequency, 1)
data["i_leak_dcdc"] = i_leak_dcdc
data["i_leak_inverter"] = i_leak_inverter
data["temp"] = round(temperature_c, 1)
data["r_iso"] = r_iso
data["totalenergy"] = round(energy_wh / 1000, 2)
data["alarm"] = alarm
self.available = True
retries = 0
finally:
if self.available != self.available_prev:
if self.available:
_LOGGER.info("Communication with %s back online", self.name)
else:
_LOGGER.info(
"Communication with %s lost",
self.name,
)
if self.client.serline.isOpen():
self.client.close()

return data

async def _async_update_data(self) -> dict[str, float]:
"""Update inverter data in the executor."""
return await self.hass.async_add_executor_job(self._update_data)

@@ -1,12 +0,0 @@
{
"entity": {
"sensor": {
"r_iso": {
"default": "mdi:omega"
},
"alarm": {
"default": "mdi:alert-octagon"
}
}
}
}

@@ -18,10 +18,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_SERIAL_NUMBER,
EntityCategory,
UnitOfElectricCurrent,
UnitOfElectricPotential,
UnitOfEnergy,
UnitOfFrequency,
UnitOfPower,
UnitOfTemperature,
)
@@ -45,50 +42,6 @@ _LOGGER = logging.getLogger(__name__)
ALARM_STATES = list(AuroraMapping.ALARM_STATES.values())

SENSOR_TYPES = [
SensorEntityDescription(
key="grid_voltage",
device_class=SensorDeviceClass.VOLTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
state_class=SensorStateClass.MEASUREMENT,
translation_key="grid_voltage",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="grid_current",
device_class=SensorDeviceClass.CURRENT,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="grid_current",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="grid_frequency",
device_class=SensorDeviceClass.FREQUENCY,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfFrequency.HERTZ,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="i_leak_dcdc",
device_class=SensorDeviceClass.CURRENT,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="i_leak_dcdc",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="i_leak_inverter",
device_class=SensorDeviceClass.CURRENT,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="i_leak_inverter",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="alarm",
device_class=SensorDeviceClass.ENUM,
@@ -110,14 +63,6 @@ SENSOR_TYPES = [
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="r_iso",
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement="MOhms",
state_class=SensorStateClass.MEASUREMENT,
translation_key="r_iso",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="totalenergy",
device_class=SensorDeviceClass.ENERGY,

@@ -21,29 +21,14 @@
},
"entity": {
"sensor": {
"grid_voltage": {
"name": "Grid voltage"
},
"grid_current": {
"name": "Grid current"
},
"alarm": {
"name": "Alarm status"
},
"power_output": {
"name": "Power output"
},
"i_leak_dcdc": {
"name": "DC-DC leak current"
},
"i_leak_inverter": {
"name": "Inverter leak current"
},
"total_energy": {
"name": "Total energy"
},
"r_iso": {
"name": "Isolation resistance"
}
}
}
@@ -33,15 +33,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
|
||||
# Ignore services that don't support usage data
|
||||
ignore_types = [*FETCH_TYPES, "Hardware"]
|
||||
ignore_types = FETCH_TYPES + ["Hardware"]
|
||||
|
||||
try:
|
||||
await client.login()
|
||||
services = await client.get_services(drop_types=ignore_types)
|
||||
except AuthenticationException as exc:
|
||||
raise ConfigEntryAuthFailed from exc
|
||||
raise ConfigEntryAuthFailed() from exc
|
||||
except ClientError as exc:
|
||||
raise ConfigEntryNotReady from exc
|
||||
raise ConfigEntryNotReady() from exc
|
||||
|
||||
# Create an appropriate refresh function
|
||||
def update_data_factory(service_id):
|
||||
|
@@ -92,10 +92,9 @@ async def fetch_redirect_uris(hass: HomeAssistant, url: str) -> list[str]:
|
||||
parser = LinkTagParser("redirect_uri")
|
||||
chunks = 0
|
||||
try:
|
||||
async with (
|
||||
aiohttp.ClientSession() as session,
|
||||
session.get(url, timeout=5) as resp,
|
||||
):
|
||||
async with aiohttp.ClientSession() as session, session.get(
|
||||
url, timeout=5
|
||||
) as resp:
|
||||
async for data in resp.content.iter_chunked(1024):
|
||||
parser.feed(data.decode())
|
||||
chunks += 1
|
||||
@@ -159,7 +158,7 @@ def _parse_client_id(client_id: str) -> ParseResult:
|
||||
# Client identifier URLs
|
||||
# MUST have either an https or http scheme
|
||||
if parts.scheme not in ("http", "https"):
|
||||
raise ValueError
|
||||
raise ValueError()
|
||||
|
||||
# MUST contain a path component
|
||||
# Handled by url canonicalization.
|
||||
@@ -184,7 +183,7 @@ def _parse_client_id(client_id: str) -> ParseResult:
|
||||
# MAY contain a port
|
||||
try:
|
||||
# parts raises ValueError when port cannot be parsed as int
|
||||
_ = parts.port
|
||||
parts.port
|
||||
except ValueError as ex:
|
||||
raise ValueError("Client ID contains invalid port") from ex
|
||||
|
||||
|
@@ -233,30 +233,6 @@ def areas_in_automation(hass: HomeAssistant, entity_id: str) -> list[str]:
    return _x_in_automation(hass, entity_id, "referenced_areas")


@callback
def automations_with_floor(hass: HomeAssistant, floor_id: str) -> list[str]:
    """Return all automations that reference the floor."""
    return _automations_with_x(hass, floor_id, "referenced_floors")


@callback
def floors_in_automation(hass: HomeAssistant, entity_id: str) -> list[str]:
    """Return all floors in an automation."""
    return _x_in_automation(hass, entity_id, "referenced_floors")


@callback
def automations_with_label(hass: HomeAssistant, label_id: str) -> list[str]:
    """Return all automations that reference the label."""
    return _automations_with_x(hass, label_id, "referenced_labels")


@callback
def labels_in_automation(hass: HomeAssistant, entity_id: str) -> list[str]:
    """Return all labels in an automation."""
    return _x_in_automation(hass, entity_id, "referenced_labels")


@callback
def automations_with_blueprint(hass: HomeAssistant, blueprint_path: str) -> list[str]:
    """Return all automations that reference the blueprint."""

@@ -298,13 +274,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    await _async_process_config(hass, config, component)

    # Add some default blueprints to blueprints/automation, does nothing
    # if blueprints/automation already exists but still has to create
    # an executor job to check if the folder exists so we run it in a
    # separate task to avoid waiting for it to finish setting up
    # since a tracked task will be waited at the end of startup
    hass.async_create_task(
        async_get_blueprints(hass).async_populate(), eager_start=True
    )
    # if blueprints/automation already exists
    await async_get_blueprints(hass).async_populate()

    async def trigger_service_handler(
        entity: BaseAutomationEntity, service_call: ServiceCall

@@ -370,16 +341,6 @@ class BaseAutomationEntity(ToggleEntity, ABC):
            return {CONF_ID: self.unique_id}
        return None

    @cached_property
    @abstractmethod
    def referenced_labels(self) -> set[str]:
        """Return a set of referenced labels."""

    @cached_property
    @abstractmethod
    def referenced_floors(self) -> set[str]:
        """Return a set of referenced floors."""

    @cached_property
    @abstractmethod
    def referenced_areas(self) -> set[str]:

@@ -413,7 +374,7 @@ class BaseAutomationEntity(ToggleEntity, ABC):
class UnavailableAutomationEntity(BaseAutomationEntity):
    """A non-functional automation entity with its state set to unavailable.

    This class is instantiated when an automation fails to validate.
    This class is instatiated when an automation fails to validate.
    """

    _attr_should_poll = False

@@ -435,16 +396,6 @@ class UnavailableAutomationEntity(BaseAutomationEntity):
        """Return the name of the entity."""
        return self._name

    @cached_property
    def referenced_labels(self) -> set[str]:
        """Return a set of referenced labels."""
        return set()

    @cached_property
    def referenced_floors(self) -> set[str]:
        """Return a set of referenced floors."""
        return set()

    @cached_property
    def referenced_areas(self) -> set[str]:
        """Return a set of referenced areas."""

@@ -532,16 +483,6 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
        """Return True if entity is on."""
        return self._async_detach_triggers is not None or self._is_enabled

    @property
    def referenced_labels(self) -> set[str]:
        """Return a set of referenced labels."""
        return self.action_script.referenced_labels

    @property
    def referenced_floors(self) -> set[str]:
        """Return a set of referenced floors."""
        return self.action_script.referenced_floors

    @cached_property
    def referenced_areas(self) -> set[str]:
        """Return a set of referenced areas."""
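The comment block in the async_setup hunk above explains the left-hand side: populating default blueprints needs an executor job just to check whether the folder exists, so it is wrapped in a tracked task instead of being awaited inline. A plain-asyncio sketch of the same shape, with populate() as a hypothetical stand-in for the blueprint check:

import asyncio

async def populate() -> None:
    """Hypothetical stand-in for async_populate() hitting the executor."""
    await asyncio.sleep(0.1)

async def setup() -> None:
    task = asyncio.create_task(populate())  # started eagerly, not awaited here
    # ... the rest of setup continues without blocking ...
    await task  # startup waits for tracked tasks at the very end

asyncio.run(setup())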
@@ -122,9 +122,9 @@ class AwairFlowHandler(ConfigFlow, domain=DOMAIN):
        for flow in self._async_in_progress():
            if flow["context"]["source"] == SOURCE_ZEROCONF:
                info = flow["context"]["title_placeholders"]
                entries[flow["context"]["host"]] = (
                    f"{info['model']} ({info['device_id']})"
                )
                entries[
                    flow["context"]["host"]
                ] = f"{info['model']} ({info['device_id']})"
        return entries

    async def async_step_local(
@@ -230,14 +230,15 @@ class AWSSQS(AWSNotify):
        async with self.session.create_client(
            self.service, **self.aws_config
        ) as client:
            tasks = [
                client.send_message(
                    QueueUrl=target,
                    MessageBody=json_body,
                    MessageAttributes=message_attributes,
            tasks = []
            for target in kwargs.get(ATTR_TARGET, []):
                tasks.append(
                    client.send_message(
                        QueueUrl=target,
                        MessageBody=json_body,
                        MessageAttributes=message_attributes,
                    )
                )
                for target in kwargs.get(ATTR_TARGET, [])
            ]

            if tasks:
                await asyncio.gather(*tasks)
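Both sides of the AWSSQS hunk fan one message out to every target and await the whole batch at once; the newer form builds the coroutine list with a comprehension and only calls gather() when there is at least one target. A runnable sketch with a dummy client call standing in for the real send:

import asyncio

async def send_message(queue_url: str, body: str) -> None:
    """Dummy stand-in for the SQS client's send_message call."""
    await asyncio.sleep(0)
    print(f"sent {body!r} to {queue_url}")

async def async_send_message(targets: list[str], json_body: str) -> None:
    tasks = [send_message(target, json_body) for target in targets]
    if tasks:  # nothing to do when no targets were given
        await asyncio.gather(*tasks)

asyncio.run(async_send_message(["queue-a", "queue-b"], "hello"))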
@@ -3,175 +3,46 @@
from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime, timedelta

from axis.interfaces.applications.fence_guard import FenceGuardHandler
from axis.interfaces.applications.loitering_guard import LoiteringGuardHandler
from axis.interfaces.applications.motion_guard import MotionGuardHandler
from axis.interfaces.applications.vmd4 import Vmd4Handler
from axis.models.event import Event, EventTopic
from axis.models.event import Event, EventGroup, EventOperation, EventTopic
from axis.vapix.interfaces.applications.fence_guard import FenceGuardHandler
from axis.vapix.interfaces.applications.loitering_guard import LoiteringGuardHandler
from axis.vapix.interfaces.applications.motion_guard import MotionGuardHandler
from axis.vapix.interfaces.applications.vmd4 import Vmd4Handler

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_call_later

from .entity import AxisEventDescription, AxisEventEntity
from .entity import AxisEventEntity
from .hub import AxisHub

DEVICE_CLASS = {
    EventGroup.INPUT: BinarySensorDeviceClass.CONNECTIVITY,
    EventGroup.LIGHT: BinarySensorDeviceClass.LIGHT,
    EventGroup.MOTION: BinarySensorDeviceClass.MOTION,
    EventGroup.SOUND: BinarySensorDeviceClass.SOUND,
}

@dataclass(frozen=True, kw_only=True)
class AxisBinarySensorDescription(AxisEventDescription, BinarySensorEntityDescription):
    """Axis binary sensor entity description."""


@callback
def event_id_is_int(event_id: str) -> bool:
    """Make sure event ID is int."""
    try:
        _ = int(event_id)
    except ValueError:
        return False
    return True


@callback
def guard_suite_supported_fn(hub: AxisHub, event: Event) -> bool:
    """Validate event ID is int."""
    _, _, profile_id = event.id.partition("Profile")
    return event_id_is_int(profile_id)


@callback
def object_analytics_supported_fn(hub: AxisHub, event: Event) -> bool:
    """Validate event ID is int."""
    _, _, profile_id = event.id.partition("Scenario")
    return event_id_is_int(profile_id)


@callback
def guard_suite_name_fn(
    handler: FenceGuardHandler
    | LoiteringGuardHandler
    | MotionGuardHandler
    | Vmd4Handler,
    event: Event,
    event_type: str,
) -> str:
    """Get guard suite item name."""
    if handler.initialized and (profiles := handler["0"].profiles):
        for profile_id, profile in profiles.items():
            camera_id = profile.camera
            if event.id == f"Camera{camera_id}Profile{profile_id}":
                return f"{event_type} {profile.name}"
    return ""


@callback
def fence_guard_name_fn(hub: AxisHub, event: Event) -> str:
    """Fence guard name."""
    return guard_suite_name_fn(hub.api.vapix.fence_guard, event, "Fence Guard")


@callback
def loitering_guard_name_fn(hub: AxisHub, event: Event) -> str:
    """Loitering guard name."""
    return guard_suite_name_fn(hub.api.vapix.loitering_guard, event, "Loitering Guard")


@callback
def motion_guard_name_fn(hub: AxisHub, event: Event) -> str:
    """Motion guard name."""
    return guard_suite_name_fn(hub.api.vapix.motion_guard, event, "Motion Guard")


@callback
def motion_detection_4_name_fn(hub: AxisHub, event: Event) -> str:
    """Motion detection 4 name."""
    return guard_suite_name_fn(hub.api.vapix.vmd4, event, "VMD4")


@callback
def object_analytics_name_fn(hub: AxisHub, event: Event) -> str:
    """Get object analytics name."""
    if hub.api.vapix.object_analytics.initialized and (
        scenarios := hub.api.vapix.object_analytics["0"].scenarios
    ):
        for scenario_id, scenario in scenarios.items():
            device_id = scenario.devices[0]["id"]
            if event.id == f"Device{device_id}Scenario{scenario_id}":
                return f"Object Analytics {scenario.name}"
    return ""


ENTITY_DESCRIPTIONS = (
    AxisBinarySensorDescription(
        key="Input port state",
        device_class=BinarySensorDeviceClass.CONNECTIVITY,
        event_topic=(EventTopic.PORT_INPUT, EventTopic.PORT_SUPERVISED_INPUT),
        name_fn=lambda hub, event: hub.api.vapix.ports[event.id].name,
        supported_fn=lambda hub, event: event_id_is_int(event.id),
    ),
    AxisBinarySensorDescription(
        key="Day/Night vision state",
        device_class=BinarySensorDeviceClass.LIGHT,
        event_topic=EventTopic.DAY_NIGHT_VISION,
    ),
    AxisBinarySensorDescription(
        key="Sound trigger state",
        device_class=BinarySensorDeviceClass.SOUND,
        event_topic=EventTopic.SOUND_TRIGGER_LEVEL,
    ),
    AxisBinarySensorDescription(
        key="Motion sensors state",
        device_class=BinarySensorDeviceClass.MOTION,
        event_topic=(
            EventTopic.PIR,
            EventTopic.MOTION_DETECTION,
            EventTopic.MOTION_DETECTION_3,
        ),
    ),
    AxisBinarySensorDescription(
        key="Motion detection 4 state",
        device_class=BinarySensorDeviceClass.MOTION,
        event_topic=EventTopic.MOTION_DETECTION_4,
        name_fn=motion_detection_4_name_fn,
        supported_fn=guard_suite_supported_fn,
    ),
    AxisBinarySensorDescription(
        key="Fence guard state",
        device_class=BinarySensorDeviceClass.MOTION,
        event_topic=EventTopic.FENCE_GUARD,
        name_fn=fence_guard_name_fn,
        supported_fn=guard_suite_supported_fn,
    ),
    AxisBinarySensorDescription(
        key="Loitering guard state",
        device_class=BinarySensorDeviceClass.MOTION,
        event_topic=EventTopic.LOITERING_GUARD,
        name_fn=loitering_guard_name_fn,
        supported_fn=guard_suite_supported_fn,
    ),
    AxisBinarySensorDescription(
        key="Motion guard state",
        device_class=BinarySensorDeviceClass.MOTION,
        event_topic=EventTopic.MOTION_GUARD,
        name_fn=motion_guard_name_fn,
        supported_fn=guard_suite_supported_fn,
    ),
    AxisBinarySensorDescription(
        key="Object analytics state",
        device_class=BinarySensorDeviceClass.MOTION,
        event_topic=EventTopic.OBJECT_ANALYTICS,
        name_fn=object_analytics_name_fn,
        supported_fn=object_analytics_supported_fn,
    ),
EVENT_TOPICS = (
    EventTopic.DAY_NIGHT_VISION,
    EventTopic.FENCE_GUARD,
    EventTopic.LOITERING_GUARD,
    EventTopic.MOTION_DETECTION,
    EventTopic.MOTION_DETECTION_3,
    EventTopic.MOTION_DETECTION_4,
    EventTopic.MOTION_GUARD,
    EventTopic.OBJECT_ANALYTICS,
    EventTopic.PIR,
    EventTopic.PORT_INPUT,
    EventTopic.PORT_SUPERVISED_INPUT,
    EventTopic.SOUND_TRIGGER_LEVEL,
)

@@ -181,25 +52,33 @@ async def async_setup_entry(
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up a Axis binary sensor."""
    AxisHub.get_hub(hass, config_entry).entity_loader.register_platform(
        async_add_entities, AxisBinarySensor, ENTITY_DESCRIPTIONS
    hub = AxisHub.get_hub(hass, config_entry)

    @callback
    def async_create_entity(event: Event) -> None:
        """Create Axis binary sensor entity."""
        async_add_entities([AxisBinarySensor(event, hub)])

    hub.api.event.subscribe(
        async_create_entity,
        topic_filter=EVENT_TOPICS,
        operation_filter=EventOperation.INITIALIZED,
    )


class AxisBinarySensor(AxisEventEntity, BinarySensorEntity):
    """Representation of a binary Axis event."""

    entity_description: AxisBinarySensorDescription

    def __init__(
        self, hub: AxisHub, description: AxisBinarySensorDescription, event: Event
    ) -> None:
    def __init__(self, event: Event, hub: AxisHub) -> None:
        """Initialize the Axis binary sensor."""
        super().__init__(hub, description, event)

        self._attr_is_on = event.is_tripped
        super().__init__(event, hub)
        self.cancel_scheduled_update: Callable[[], None] | None = None

        self._attr_device_class = DEVICE_CLASS.get(event.group)
        self._attr_is_on = event.is_tripped

        self._set_name(event)

    @callback
    def async_event_callback(self, event: Event) -> None:
        """Update the sensor's state, if needed."""

@@ -224,3 +103,45 @@ class AxisBinarySensor(AxisEventEntity, BinarySensorEntity):
            timedelta(seconds=self.hub.config.trigger_time),
            scheduled_update,
        )

    @callback
    def _set_name(self, event: Event) -> None:
        """Set binary sensor name."""
        if (
            event.group == EventGroup.INPUT
            and event.id in self.hub.api.vapix.ports
            and self.hub.api.vapix.ports[event.id].name
        ):
            self._attr_name = self.hub.api.vapix.ports[event.id].name

        elif event.group == EventGroup.MOTION:
            event_data: FenceGuardHandler | LoiteringGuardHandler | MotionGuardHandler | Vmd4Handler | None = None
            if event.topic_base == EventTopic.FENCE_GUARD:
                event_data = self.hub.api.vapix.fence_guard
            elif event.topic_base == EventTopic.LOITERING_GUARD:
                event_data = self.hub.api.vapix.loitering_guard
            elif event.topic_base == EventTopic.MOTION_GUARD:
                event_data = self.hub.api.vapix.motion_guard
            elif event.topic_base == EventTopic.MOTION_DETECTION_4:
                event_data = self.hub.api.vapix.vmd4
            if (
                event_data
                and event_data.initialized
                and (profiles := event_data["0"].profiles)
            ):
                for profile_id, profile in profiles.items():
                    camera_id = profile.camera
                    if event.id == f"Camera{camera_id}Profile{profile_id}":
                        self._attr_name = f"{self._event_type} {profile.name}"
                        return

        if (
            event.topic_base == EventTopic.OBJECT_ANALYTICS
            and self.hub.api.vapix.object_analytics.initialized
            and (scenarios := self.hub.api.vapix.object_analytics["0"].scenarios)
        ):
            for scenario_id, scenario in scenarios.items():
                device_id = scenario.devices[0]["id"]
                if event.id == f"Device{device_id}Scenario{scenario_id}":
                    self._attr_name = f"{self._event_type} {scenario.name}"
                    break
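The left-hand version of the binary sensor module above replaces per-event if/elif naming with declarative entity descriptions whose supported_fn and name_fn callables decide, per discovered event, whether an entity is created and what it is called. A stripped-down sketch of that pattern (names here are illustrative, not the integration's API):

from collections.abc import Callable
from dataclasses import dataclass

@dataclass(frozen=True, kw_only=True)
class EventDescription:
    key: str
    supported_fn: Callable[[str], bool] = lambda event_id: True
    name_fn: Callable[[str], str] = lambda event_id: ""

DESCRIPTIONS = (
    EventDescription(
        key="Input port state",
        supported_fn=lambda event_id: event_id.isdigit(),
        name_fn=lambda event_id: f"Input {event_id}",
    ),
)

for description in DESCRIPTIONS:
    for event_id in ("7", "CameraProfile1"):
        if description.supported_fn(event_id):  # skips non-numeric IDs
            print(description.name_fn(event_id) or f"{description.key} {event_id}")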
@@ -25,7 +25,6 @@ from homeassistant.const import (
    CONF_NAME,
    CONF_PASSWORD,
    CONF_PORT,
    CONF_PROTOCOL,
    CONF_USERNAME,
)
from homeassistant.core import callback

@@ -43,9 +42,7 @@ from .errors import AuthenticationRequired, CannotConnect
from .hub import AxisHub, get_axis_api

AXIS_OUI = {"00:40:8c", "ac:cc:8e", "b8:a4:4f"}
DEFAULT_PORT = 443
DEFAULT_PROTOCOL = "https"
PROTOCOL_CHOICES = ["https", "http"]
DEFAULT_PORT = 80


class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):

@@ -77,19 +74,11 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
            try:
                api = await get_axis_api(self.hass, MappingProxyType(user_input))

            except AuthenticationRequired:
                errors["base"] = "invalid_auth"

            except CannotConnect:
                errors["base"] = "cannot_connect"

            else:
                serial = api.vapix.serial_number
                await self.async_set_unique_id(format_mac(serial))

                self._abort_if_unique_id_configured(
                    updates={
                        CONF_PROTOCOL: user_input[CONF_PROTOCOL],
                        CONF_HOST: user_input[CONF_HOST],
                        CONF_PORT: user_input[CONF_PORT],
                        CONF_USERNAME: user_input[CONF_USERNAME],

@@ -98,7 +87,6 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
                )

                self.config = {
                    CONF_PROTOCOL: user_input[CONF_PROTOCOL],
                    CONF_HOST: user_input[CONF_HOST],
                    CONF_PORT: user_input[CONF_PORT],
                    CONF_USERNAME: user_input[CONF_USERNAME],

@@ -108,8 +96,13 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):

                return await self._create_entry(serial)

            except AuthenticationRequired:
                errors["base"] = "invalid_auth"

            except CannotConnect:
                errors["base"] = "cannot_connect"

        data = self.discovery_schema or {
            vol.Required(CONF_PROTOCOL): vol.In(PROTOCOL_CHOICES),
            vol.Required(CONF_HOST): str,
            vol.Required(CONF_USERNAME): str,
            vol.Required(CONF_PASSWORD): str,

@@ -146,14 +139,6 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
        title = f"{model} - {serial}"
        return self.async_create_entry(title=title, data=self.config)

    async def async_step_reconfigure(
        self, user_input: Mapping[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Trigger a reconfiguration flow."""
        entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
        assert entry
        return await self._redo_configuration(entry.data, keep_password=True)

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:

@@ -162,22 +147,11 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
            CONF_NAME: entry_data[CONF_NAME],
            CONF_HOST: entry_data[CONF_HOST],
        }
        return await self._redo_configuration(entry_data, keep_password=False)

    async def _redo_configuration(
        self, entry_data: Mapping[str, Any], keep_password: bool
    ) -> ConfigFlowResult:
        """Re-run configuration step."""
        self.discovery_schema = {
            vol.Required(
                CONF_PROTOCOL, default=entry_data.get(CONF_PROTOCOL, "http")
            ): str,
            vol.Required(CONF_HOST, default=entry_data[CONF_HOST]): str,
            vol.Required(CONF_USERNAME, default=entry_data[CONF_USERNAME]): str,
            vol.Required(
                CONF_PASSWORD,
                default=entry_data[CONF_PASSWORD] if keep_password else "",
            ): str,
            vol.Required(CONF_PASSWORD): str,
            vol.Required(CONF_PORT, default=entry_data[CONF_PORT]): int,
        }

@@ -192,7 +166,7 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
                CONF_HOST: discovery_info.ip,
                CONF_MAC: format_mac(discovery_info.macaddress),
                CONF_NAME: discovery_info.hostname,
                CONF_PORT: 80,
                CONF_PORT: DEFAULT_PORT,
            }
        )

@@ -236,7 +210,10 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
        await self.async_set_unique_id(discovery_info[CONF_MAC])

        self._abort_if_unique_id_configured(
            updates={CONF_HOST: discovery_info[CONF_HOST]}
            updates={
                CONF_HOST: discovery_info[CONF_HOST],
                CONF_PORT: discovery_info[CONF_PORT],
            }
        )

        self.context.update(

@@ -250,11 +227,10 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
        )

        self.discovery_schema = {
            vol.Required(CONF_PROTOCOL): vol.In(PROTOCOL_CHOICES),
            vol.Required(CONF_HOST, default=discovery_info[CONF_HOST]): str,
            vol.Required(CONF_USERNAME): str,
            vol.Required(CONF_PASSWORD): str,
            vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
            vol.Required(CONF_PORT, default=discovery_info[CONF_PORT]): int,
        }

        return await self.async_step_user()
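Throughout the config flow above, discovery_schema is a dict of vol.Required markers with defaults, so a reauth or reconfigure form comes pre-filled with the stored values. A minimal voluptuous sketch of how such defaults behave:

import voluptuous as vol

entry_data = {"host": "192.168.0.10", "port": 443}

schema = vol.Schema(
    {
        vol.Required("host", default=entry_data["host"]): str,
        vol.Required("port", default=entry_data["port"]): int,
        vol.Required("password", default=""): str,
    }
)

# Missing keys are filled from the defaults during validation.
print(schema({"password": "secret"}))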
@@ -1,23 +1,16 @@
"""Base classes for Axis entities."""

from __future__ import annotations

from abc import abstractmethod
from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING

from axis.models.event import Event, EventTopic

from homeassistant.core import callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity import Entity

from .const import DOMAIN as AXIS_DOMAIN

if TYPE_CHECKING:
    from .hub import AxisHub
    from .hub import AxisHub

TOPIC_TO_EVENT_TYPE = {
    EventTopic.DAY_NIGHT_VISION: "DayNight",

@@ -39,18 +32,6 @@ TOPIC_TO_EVENT_TYPE = {
}


@dataclass(frozen=True, kw_only=True)
class AxisEventDescription(EntityDescription):
    """Axis event based entity description."""

    event_topic: tuple[EventTopic, ...] | EventTopic
    """Event topic that provides state updates."""
    name_fn: Callable[[AxisHub, Event], str] = lambda hub, event: ""
    """Function providing the corresponding name to the event ID."""
    supported_fn: Callable[[AxisHub, Event], bool] = lambda hub, event: True
    """Function validating if event is supported."""


class AxisEntity(Entity):
    """Base common to all Axis entities."""

@@ -85,26 +66,21 @@ class AxisEntity(Entity):
class AxisEventEntity(AxisEntity):
    """Base common to all Axis entities from event stream."""

    entity_description: AxisEventDescription

    _attr_should_poll = False

    def __init__(
        self, hub: AxisHub, description: AxisEventDescription, event: Event
    ) -> None:
    def __init__(self, event: Event, hub: AxisHub) -> None:
        """Initialize the Axis event."""
        super().__init__(hub)

        self.entity_description = description

        self._event_id = event.id
        self._event_topic = event.topic_base
        self._event_type = TOPIC_TO_EVENT_TYPE[event.topic_base]

        event_type = TOPIC_TO_EVENT_TYPE[event.topic_base]
        self._attr_name = description.name_fn(hub, event) or f"{event_type} {event.id}"

        self._attr_name = f"{self._event_type} {event.id}"
        self._attr_unique_id = f"{hub.unique_id}-{event.topic}-{event.id}"

        self._attr_device_class = event.group.value

    @callback
    @abstractmethod
    def async_event_callback(self, event: Event) -> None:
@@ -5,15 +5,9 @@ from types import MappingProxyType
from typing import Any

import axis
from axis.models.configuration import Configuration
from axis.configuration import Configuration

from homeassistant.const import (
    CONF_HOST,
    CONF_PASSWORD,
    CONF_PORT,
    CONF_PROTOCOL,
    CONF_USERNAME,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.httpx_client import get_async_client

@@ -28,20 +22,21 @@ async def get_axis_api(
    """Create a Axis device API."""
    session = get_async_client(hass, verify_ssl=False)

    api = axis.AxisDevice(
    device = axis.AxisDevice(
        Configuration(
            session,
            config[CONF_HOST],
            port=config[CONF_PORT],
            username=config[CONF_USERNAME],
            password=config[CONF_PASSWORD],
            web_proto=config.get(CONF_PROTOCOL, "http"),
        )
    )

    try:
        async with timeout(30):
            await api.vapix.initialize()
            await device.vapix.initialize()

        return device

    except axis.Unauthorized as err:
        LOGGER.warning(

@@ -56,5 +51,3 @@ async def get_axis_api(
    except axis.AxisException as err:
        LOGGER.exception("Unknown Axis communication error occurred")
        raise AuthenticationRequired from err

    return api
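get_axis_api above keeps the same shape on both sides of the diff: bound the first device call with a timeout and translate library errors into the integration's own exceptions so the config flow can react to them. A generic sketch of that pattern (the exception and function names here are placeholders, not the axis library's API):

import asyncio

class CannotConnect(Exception):
    """Placeholder for the integration-level error."""

async def initialize() -> None:
    """Placeholder for the first device call, e.g. vapix.initialize()."""
    await asyncio.sleep(0)

async def connect() -> None:
    try:
        async with asyncio.timeout(30):  # Python 3.11+
            await initialize()
    except (TimeoutError, OSError) as err:
        # Re-raise as the integration's own error type.
        raise CannotConnect from err

asyncio.run(connect())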
@@ -1,83 +0,0 @@
"""Axis network device entity loader.

Central point to load entities for the different platforms.
"""

from __future__ import annotations

from typing import TYPE_CHECKING

from axis.models.event import Event, EventOperation, EventTopic

from homeassistant.core import callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from ..entity import AxisEventDescription, AxisEventEntity

if TYPE_CHECKING:
    from .hub import AxisHub


class AxisEntityLoader:
    """Axis network device integration handling platforms for entity registration."""

    def __init__(self, hub: AxisHub) -> None:
        """Initialize the Axis entity loader."""
        self.hub = hub

        self.registered_events: set[tuple[str, EventTopic, str]] = set()
        self.topic_to_entity: dict[
            EventTopic,
            list[
                tuple[
                    AddEntitiesCallback,
                    type[AxisEventEntity],
                    AxisEventDescription,
                ]
            ],
        ] = {}

    @callback
    def register_platform(
        self,
        async_add_entities: AddEntitiesCallback,
        entity_class: type[AxisEventEntity],
        descriptions: tuple[AxisEventDescription, ...],
    ) -> None:
        """Register Axis entity platforms."""
        topics: tuple[EventTopic, ...]
        for description in descriptions:
            if isinstance(description.event_topic, EventTopic):
                topics = (description.event_topic,)
            else:
                topics = description.event_topic
            for topic in topics:
                self.topic_to_entity.setdefault(topic, []).append(
                    (async_add_entities, entity_class, description)
                )

    @callback
    def _create_entities_from_event(self, event: Event) -> None:
        """Create Axis entities from event."""
        event_id = (event.topic, event.topic_base, event.id)
        if event_id in self.registered_events:
            # Device has restarted and all events are initialized anew
            return
        self.registered_events.add(event_id)
        for (
            async_add_entities,
            entity_class,
            description,
        ) in self.topic_to_entity[event.topic_base]:
            if not description.supported_fn(self.hub, event):
                continue
            async_add_entities([entity_class(self.hub, description, event)])

    @callback
    def initialize_platforms(self) -> None:
        """Prepare event listener that can populate platform entities."""
        self.hub.api.event.subscribe(
            self._create_entities_from_event,
            topic_filter=tuple(self.topic_to_entity.keys()),
            operation_filter=EventOperation.INITIALIZED,
        )

@@ -6,9 +6,9 @@ from typing import Any

import axis
from axis.errors import Unauthorized
from axis.interfaces.mqtt import mqtt_json_to_event
from axis.models.mqtt import ClientState
from axis.stream_manager import Signal, State
from axis.vapix.interfaces.mqtt import mqtt_json_to_event
from axis.vapix.models.mqtt import ClientState

from homeassistant.components import mqtt
from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN

@@ -22,7 +22,6 @@ from homeassistant.setup import async_when_setup

from ..const import ATTR_MANUFACTURER, DOMAIN as AXIS_DOMAIN
from .config import AxisConfig
from .entity_loader import AxisEntityLoader


class AxisHub:

@@ -34,7 +33,6 @@ class AxisHub:
        """Initialize the device."""
        self.hass = hass
        self.config = AxisConfig.from_config_entry(config_entry)
        self.entity_loader = AxisEntityLoader(self)
        self.api = api

        self.available = True

@@ -116,7 +114,7 @@ class AxisHub:
        if status.status.state == ClientState.ACTIVE:
            self.config.entry.async_on_unload(
                await mqtt.async_subscribe(
                    hass, f"{status.config.device_topic_prefix}/#", self.mqtt_message
                    hass, f"{self.api.vapix.serial_number}/#", self.mqtt_message
                )
            )

@@ -124,8 +122,7 @@ class AxisHub:
    def mqtt_message(self, message: ReceiveMessage) -> None:
        """Receive Axis MQTT message."""
        self.disconnect_from_stream()
        if message.topic.endswith("event/connection"):
            return

        event = mqtt_json_to_event(message.payload)
        self.api.event.handler(event)

@@ -134,8 +131,6 @@ class AxisHub:
    @callback
    def setup(self) -> None:
        """Set up the device events."""
        self.entity_loader.initialize_platforms()

        self.api.stream.connection_status_callback.append(
            self.connection_status_callback
        )
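The AxisEntityLoader deleted above (and unhooked from AxisHub in these hunks) boils down to two structures: a topic-to-registrations dict filled with setdefault().append(), and a seen-set that dedupes events re-initialized after a device restart. The core pattern in isolation:

registered: set[tuple[str, str]] = set()
topic_to_handlers: dict[str, list[str]] = {}

def register(topic: str, handler: str) -> None:
    topic_to_handlers.setdefault(topic, []).append(handler)

def on_event(topic: str, event_id: str) -> None:
    key = (topic, event_id)
    if key in registered:
        # Device has restarted and all events are initialized anew
        return
    registered.add(key)
    for handler in topic_to_handlers.get(topic, []):
        print(f"{handler} creates an entity for {event_id}")

register("relay", "switch platform")
on_event("relay", "0")
on_event("relay", "0")  # deduped: the second initialization is ignored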
@@ -1,88 +1,75 @@
"""Support for Axis lights."""

from dataclasses import dataclass
from typing import Any

from axis.models.event import Event, EventTopic
from axis.models.event import Event, EventOperation, EventTopic

from homeassistant.components.light import (
    ATTR_BRIGHTNESS,
    ColorMode,
    LightEntity,
    LightEntityDescription,
)
from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .entity import TOPIC_TO_EVENT_TYPE, AxisEventDescription, AxisEventEntity
from .entity import AxisEventEntity
from .hub import AxisHub


@callback
def light_name_fn(hub: AxisHub, event: Event) -> str:
    """Provide Axis light entity name."""
    event_type = TOPIC_TO_EVENT_TYPE[event.topic_base]
    light_id = f"led{event.id}"
    light_type = hub.api.vapix.light_control[light_id].light_type
    return f"{light_type} {event_type} {event.id}"


@dataclass(frozen=True, kw_only=True)
class AxisLightDescription(AxisEventDescription, LightEntityDescription):
    """Axis light entity description."""


ENTITY_DESCRIPTIONS = (
    AxisLightDescription(
        key="Light state control",
        event_topic=EventTopic.LIGHT_STATUS,
        name_fn=light_name_fn,
        supported_fn=lambda hub, event: len(hub.api.vapix.light_control) > 0,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the Axis light platform."""
    AxisHub.get_hub(hass, config_entry).entity_loader.register_platform(
        async_add_entities, AxisLight, ENTITY_DESCRIPTIONS
    """Set up a Axis light."""
    hub = AxisHub.get_hub(hass, config_entry)

    if hub.api.vapix.light_control is None or len(hub.api.vapix.light_control) == 0:
        return

    @callback
    def async_create_entity(event: Event) -> None:
        """Create Axis light entity."""
        async_add_entities([AxisLight(event, hub)])

    hub.api.event.subscribe(
        async_create_entity,
        topic_filter=EventTopic.LIGHT_STATUS,
        operation_filter=EventOperation.INITIALIZED,
    )


class AxisLight(AxisEventEntity, LightEntity):
    """Representation of an Axis light."""

    entity_description: AxisLightDescription
    """Representation of a light Axis event."""

    _attr_should_poll = True
    _attr_color_mode = ColorMode.BRIGHTNESS
    _attr_supported_color_modes = {ColorMode.BRIGHTNESS}

    def __init__(
        self, hub: AxisHub, description: AxisLightDescription, event: Event
    ) -> None:
    def __init__(self, event: Event, hub: AxisHub) -> None:
        """Initialize the Axis light."""
        super().__init__(hub, description, event)
        super().__init__(event, hub)

        self._attr_is_on = event.is_tripped
        self._light_id = f"led{event.id}"

        self.current_intensity = 0
        self.max_intensity = 0

        light_type = hub.api.vapix.light_control[self._light_id].light_type
        self._attr_name = f"{light_type} {self._event_type} {event.id}"
        self._attr_is_on = event.is_tripped

        self._attr_supported_color_modes = {ColorMode.BRIGHTNESS}
        self._attr_color_mode = ColorMode.BRIGHTNESS

    async def async_added_to_hass(self) -> None:
        """Subscribe lights events."""
        await super().async_added_to_hass()
        self.current_intensity = (

        current_intensity = (
            await self.hub.api.vapix.light_control.get_current_intensity(self._light_id)
        )
        self.max_intensity = (
            await self.hub.api.vapix.light_control.get_valid_intensity(self._light_id)
        ).high
        self.current_intensity = current_intensity

        max_intensity = await self.hub.api.vapix.light_control.get_valid_intensity(
            self._light_id
        )
        self.max_intensity = max_intensity.high

    @callback
    def async_event_callback(self, event: Event) -> None:

@@ -113,6 +100,7 @@ class AxisLight(AxisEventEntity, LightEntity):

    async def async_update(self) -> None:
        """Update brightness."""
        self.current_intensity = (
        current_intensity = (
            await self.hub.api.vapix.light_control.get_current_intensity(self._light_id)
        )
        self.current_intensity = current_intensity

@@ -26,7 +26,7 @@
    "iot_class": "local_push",
    "loggers": ["axis"],
    "quality_scale": "platinum",
    "requirements": ["axis==60"],
    "requirements": ["axis==54"],
    "ssdp": [
        {
            "manufacturer": "AXIS"
@@ -1,63 +1,46 @@
"""Support for Axis switches."""

from dataclasses import dataclass
from typing import Any

from axis.models.event import Event, EventTopic
from axis.models.event import Event, EventOperation, EventTopic

from homeassistant.components.switch import (
    SwitchDeviceClass,
    SwitchEntity,
    SwitchEntityDescription,
)
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .entity import AxisEventDescription, AxisEventEntity
from .entity import AxisEventEntity
from .hub import AxisHub


@dataclass(frozen=True, kw_only=True)
class AxisSwitchDescription(AxisEventDescription, SwitchEntityDescription):
    """Axis switch entity description."""


ENTITY_DESCRIPTIONS = (
    AxisSwitchDescription(
        key="Relay state control",
        device_class=SwitchDeviceClass.OUTLET,
        entity_category=EntityCategory.CONFIG,
        event_topic=EventTopic.RELAY,
        supported_fn=lambda hub, event: isinstance(int(event.id), int),
        name_fn=lambda hub, event: hub.api.vapix.ports[event.id].name,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the Axis switch platform."""
    AxisHub.get_hub(hass, config_entry).entity_loader.register_platform(
        async_add_entities, AxisSwitch, ENTITY_DESCRIPTIONS
    """Set up a Axis switch."""
    hub = AxisHub.get_hub(hass, config_entry)

    @callback
    def async_create_entity(event: Event) -> None:
        """Create Axis switch entity."""
        async_add_entities([AxisSwitch(event, hub)])

    hub.api.event.subscribe(
        async_create_entity,
        topic_filter=EventTopic.RELAY,
        operation_filter=EventOperation.INITIALIZED,
    )


class AxisSwitch(AxisEventEntity, SwitchEntity):
    """Representation of a Axis switch."""

    entity_description: AxisSwitchDescription

    def __init__(
        self, hub: AxisHub, description: AxisSwitchDescription, event: Event
    ) -> None:
    def __init__(self, event: Event, hub: AxisHub) -> None:
        """Initialize the Axis switch."""
        super().__init__(hub, description, event)

        super().__init__(event, hub)
        if event.id and hub.api.vapix.ports[event.id].name:
            self._attr_name = hub.api.vapix.ports[event.id].name
        self._attr_is_on = event.is_tripped

    @callback
@@ -1,5 +0,0 @@
{
    "services": {
        "create": "mdi:cloud-upload"
    }
}

@@ -2,21 +2,21 @@
    "entity": {
        "binary_sensor": {
            "filter_1": {
                "default": "mdi:sync",
                "default": "mdi:sync-off",
                "state": {
                    "off": "mdi:sync-off"
                    "on": "mdi:sync"
                }
            },
            "filter_2": {
                "default": "mdi:sync",
                "default": "mdi:sync-off",
                "state": {
                    "off": "mdi:sync-off"
                    "on": "mdi:sync"
                }
            },
            "circ_pump": {
                "default": "mdi:pump",
                "default": "mdi:pump-off",
                "state": {
                    "off": "mdi:pump-off"
                    "on": "mdi:pump"
                }
            }
        },

@@ -1,5 +0,0 @@
{
    "services": {
        "reload": "mdi:reload"
    }
}
13
homeassistant/components/binary_sensor/group.py
Normal file
@@ -0,0 +1,13 @@
"""Describe group states."""

from homeassistant.components.group import GroupIntegrationRegistry
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant, callback


@callback
def async_describe_on_off_states(
    hass: HomeAssistant, registry: GroupIntegrationRegistry
) -> None:
    """Describe group on off states."""
    registry.on_off_states({STATE_ON}, STATE_OFF)

@@ -1,5 +0,0 @@
{
    "services": {
        "set_all_zones": "mdi:home-sound-in"
    }
}
@@ -4,6 +4,7 @@ from __future__ import annotations

import asyncio
from contextlib import suppress
from datetime import datetime
from typing import Any

from bluecurrent_api import Client

@@ -15,17 +16,24 @@ from bluecurrent_api.exceptions import (
)

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_NAME, CONF_API_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.const import (
    ATTR_NAME,
    CONF_API_TOKEN,
    EVENT_HOMEASSISTANT_STOP,
    Platform,
)
from homeassistant.core import Event, HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_call_later

from .const import DOMAIN, EVSE_ID, LOGGER, MODEL_TYPE

PLATFORMS = [Platform.SENSOR]
CHARGE_POINTS = "CHARGE_POINTS"
DATA = "data"
DELAY = 5
SMALL_DELAY = 1
LARGE_DELAY = 20

GRID = "GRID"
OBJECT = "object"

@@ -40,19 +48,26 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
    connector = Connector(hass, config_entry, client)

    try:
        await client.validate_api_token(api_token)
        await connector.connect(api_token)
    except InvalidApiToken as err:
        raise ConfigEntryAuthFailed("Invalid API token.") from err
    except BlueCurrentException as err:
        raise ConfigEntryNotReady from err
    config_entry.async_create_background_task(
        hass, connector.run_task(), "blue_current-websocket"
    )

    await client.wait_for_charge_points()
    hass.async_create_background_task(connector.start_loop(), "blue_current-websocket")
    await client.get_charge_points()

    await client.wait_for_response()
    hass.data[DOMAIN][config_entry.entry_id] = connector
    await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)

    config_entry.async_on_unload(connector.disconnect)

    async def _async_disconnect_websocket(_: Event) -> None:
        await connector.disconnect()

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_disconnect_websocket)

    return True

@@ -80,6 +95,12 @@ class Connector:
        self.client = client
        self.charge_points: dict[str, dict] = {}
        self.grid: dict[str, Any] = {}
        self.available = False

    async def connect(self, token: str) -> None:
        """Register on_data and connect to the websocket."""
        await self.client.connect(token)
        self.available = True

    async def on_data(self, message: dict) -> None:
        """Handle received data."""

@@ -111,9 +132,9 @@ class Connector:
                entry[EVSE_ID], entry[MODEL_TYPE], entry[ATTR_NAME]
            )
            for entry in charge_points_data
        ),
        self.client.get_grid_status(charge_points_data[0][EVSE_ID]),
    )
        )
        await self.client.get_grid_status(charge_points_data[0][EVSE_ID])

    async def handle_charge_point(self, evse_id: str, model: str, name: str) -> None:
        """Add the chargepoint and request their data."""

@@ -127,53 +148,44 @@ class Connector:
    def update_charge_point(self, evse_id: str, data: dict) -> None:
        """Update the charge point data."""
        self.charge_points[evse_id].update(data)
        self.dispatch_charge_point_update_signal(evse_id)
        self.dispatch_value_update_signal(evse_id)

    def dispatch_charge_point_update_signal(self, evse_id: str) -> None:
        """Dispatch a charge point update signal."""
        async_dispatcher_send(self.hass, f"{DOMAIN}_charge_point_update_{evse_id}")
    def dispatch_value_update_signal(self, evse_id: str) -> None:
        """Dispatch a value signal."""
        async_dispatcher_send(self.hass, f"{DOMAIN}_value_update_{evse_id}")

    def dispatch_grid_update_signal(self) -> None:
        """Dispatch a grid update signal."""
        """Dispatch a grid signal."""
        async_dispatcher_send(self.hass, f"{DOMAIN}_grid_update")

    async def on_open(self) -> None:
        """Fetch data when connection is established."""
        await self.client.get_charge_points()

    async def run_task(self) -> None:
    async def start_loop(self) -> None:
        """Start the receive loop."""
        try:
            while True:
                try:
                    await self.client.connect(self.on_data, self.on_open)
                except RequestLimitReached:
                    LOGGER.warning(
                        "Request limit reached. reconnecting at 00:00 (Europe/Amsterdam)"
                    )
                    delay = self.client.get_next_reset_delta().seconds
                except WebsocketError:
                    LOGGER.debug("Disconnected, retrying in background")
                    delay = DELAY
            await self.client.start_loop(self.on_data)
        except BlueCurrentException as err:
            LOGGER.warning(
                "Disconnected from the Blue Current websocket. Retrying to connect in background. %s",
                err,
            )

                self._on_disconnect()
                await asyncio.sleep(delay)
        finally:
            await self._disconnect()
            async_call_later(self.hass, SMALL_DELAY, self.reconnect)

    def _on_disconnect(self) -> None:
        """Dispatch signals to update entity states."""
        for evse_id in self.charge_points:
            self.dispatch_charge_point_update_signal(evse_id)
        self.dispatch_grid_update_signal()
    async def reconnect(self, _event_time: datetime | None = None) -> None:
        """Keep trying to reconnect to the websocket."""
        try:
            await self.connect(self.config.data[CONF_API_TOKEN])
            LOGGER.debug("Reconnected to the Blue Current websocket")
            self.hass.async_create_task(self.start_loop())
        except RequestLimitReached:
            self.available = False
            async_call_later(
                self.hass, self.client.get_next_reset_delta(), self.reconnect
            )
        except WebsocketError:
            self.available = False
            async_call_later(self.hass, LARGE_DELAY, self.reconnect)

    async def _disconnect(self) -> None:
    async def disconnect(self) -> None:
        """Disconnect from the websocket."""
        with suppress(WebsocketError):
            await self.client.disconnect()
            self._on_disconnect()

    @property
    def connected(self) -> bool:
        """Returns the connection status."""
        return self.client.is_connected()
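The right-hand Connector above never raises out of the receive loop; it schedules reconnect() with async_call_later, waiting LARGE_DELAY after a websocket error and a full reset period after a rate limit. A plain-asyncio sketch of the same retry shape (delays and names are illustrative, not the library's API):

import asyncio

class WebsocketError(Exception):
    """Placeholder for the client library's connection error."""

async def connect(attempt: int) -> None:
    if attempt < 2:  # simulate two failed attempts before success
        raise WebsocketError("still unreachable")

async def reconnect_loop() -> None:
    attempt = 0
    while True:
        try:
            await connect(attempt)
        except WebsocketError:
            attempt += 1
            await asyncio.sleep(min(20, 5 * attempt))  # bounded backoff
            continue
        print("connected")
        return

asyncio.run(reconnect_loop())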
@@ -40,7 +40,7 @@ class BlueCurrentEntity(Entity):
    @property
    def available(self) -> bool:
        """Return entity availability."""
        return self.connector.connected and self.has_value
        return self.connector.available and self.has_value

    @callback
    @abstractmethod

@@ -53,7 +53,7 @@ class ChargepointEntity(BlueCurrentEntity):

    def __init__(self, connector: Connector, evse_id: str) -> None:
        """Initialize the entity."""
        super().__init__(connector, f"{DOMAIN}_charge_point_update_{evse_id}")
        super().__init__(connector, f"{DOMAIN}_value_update_{evse_id}")

        chargepoint_name = connector.charge_points[evse_id][ATTR_NAME]

@@ -5,6 +5,5 @@
    "config_flow": true,
    "documentation": "https://www.home-assistant.io/integrations/blue_current",
    "iot_class": "cloud_push",
    "loggers": ["bluecurrent_api"],
    "requirements": ["bluecurrent-api==1.2.3"]
    "requirements": ["bluecurrent-api==1.0.6"]
}
@@ -26,14 +26,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    address = entry.unique_id
    assert address is not None
    data = BlueMaestroBluetoothDeviceData()
    coordinator = hass.data.setdefault(DOMAIN, {})[entry.entry_id] = (
        PassiveBluetoothProcessorCoordinator(
            hass,
            _LOGGER,
            address=address,
            mode=BluetoothScanningMode.PASSIVE,
            update_method=data.update,
        )
    coordinator = hass.data.setdefault(DOMAIN, {})[
        entry.entry_id
    ] = PassiveBluetoothProcessorCoordinator(
        hass,
        _LOGGER,
        address=address,
        mode=BluetoothScanningMode.PASSIVE,
        update_method=data.update,
    )
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    entry.async_on_unload(
@@ -366,7 +366,7 @@ class BluesoundPlayer(MediaPlayerEntity):
                data = None
            elif response.status == 595:
                _LOGGER.info("Status 595 returned, treating as timeout")
                raise BluesoundPlayer._TimeoutException
                raise BluesoundPlayer._TimeoutException()
            else:
                _LOGGER.error("Error %s on %s", response.status, url)
                return None

@@ -432,7 +432,7 @@ class BluesoundPlayer(MediaPlayerEntity):
                    self.async_write_ha_state()
                elif response.status == 595:
                    _LOGGER.info("Status 595 returned, treating as timeout")
                    raise BluesoundPlayer._TimeoutException
                    raise BluesoundPlayer._TimeoutException()
                else:
                    _LOGGER.error(
                        "Error %s on %s. Trying one more time", response.status, url
@@ -166,9 +166,7 @@ async def _async_start_adapter_discovery(
        """Shutdown debouncer."""
        discovery_debouncer.async_shutdown()

    hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_STOP, _async_shutdown_debouncer, run_immediately=True
    )
    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_shutdown_debouncer)

    async def _async_call_debouncer(now: datetime.datetime) -> None:
        """Call the debouncer at a later time."""

@@ -199,9 +197,7 @@ async def _async_start_adapter_discovery(

    cancel = usb.async_register_scan_request_callback(hass, _async_trigger_discovery)
    hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_STOP,
        hass_callback(lambda event: cancel()),
        run_immediately=True,
        EVENT_HOMEASSISTANT_STOP, hass_callback(lambda event: cancel())
    )

@@ -135,11 +135,9 @@ class HomeAssistantBluetoothManager(BluetoothManager):
            self._bluetooth_adapters, self.storage
        )
        self._cancel_logging_listener = self.hass.bus.async_listen(
            EVENT_LOGGING_CHANGED, self._async_logging_changed, run_immediately=True
        )
        self.hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_STOP, self.async_stop, run_immediately=True
            EVENT_LOGGING_CHANGED, self._async_logging_changed
        )
        self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self.async_stop)
        seen: set[str] = set()
        for address, service_info in itertools.chain(
            self._connectable_history.items(), self._all_history.items()
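Each bluetooth hunk above ends the same way: a one-shot EVENT_HOMEASSISTANT_STOP listener tears the resource down at shutdown, and the diff only adds or drops the run_immediately flag. A sketch of that cleanup shape against the Home Assistant event bus API, as used in the hunks:

from collections.abc import Callable

from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import Event, HomeAssistant, callback

def register_cleanup(hass: HomeAssistant, cancel: Callable[[], None]) -> None:
    """Cancel a subscription exactly once, when Home Assistant stops."""

    @callback
    def _on_stop(event: Event) -> None:
        cancel()

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _on_stop)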
@@ -17,7 +17,7 @@
    "bleak==0.21.1",
    "bleak-retry-connector==3.4.0",
    "bluetooth-adapters==0.18.0",
    "bluetooth-auto-recovery==1.4.0",
    "bluetooth-auto-recovery==1.3.0",
    "bluetooth-data-tools==1.19.0",
    "dbus-fast==2.21.1",
    "habluetooth==2.4.2"
Some files were not shown because too many files have changed in this diff.