Compare commits


1 Commit

Author: Shay Levy
SHA1: 628cc5dccb
Message: Update opencv-python-headless to 4.9.0.80
Date: 2024-03-13 22:41:47 +00:00
11685 changed files with 277263 additions and 723892 deletions

View File

@@ -14,7 +14,6 @@ core: &core
base_platforms: &base_platforms
- homeassistant/components/air_quality/**
- homeassistant/components/alarm_control_panel/**
- homeassistant/components/assist_satellite/**
- homeassistant/components/binary_sensor/**
- homeassistant/components/button/**
- homeassistant/components/calendar/**
@@ -50,7 +49,6 @@ base_platforms: &base_platforms
- homeassistant/components/tts/**
- homeassistant/components/update/**
- homeassistant/components/vacuum/**
- homeassistant/components/valve/**
- homeassistant/components/water_heater/**
- homeassistant/components/weather/**
@@ -62,7 +60,6 @@ components: &components
- homeassistant/components/auth/**
- homeassistant/components/automation/**
- homeassistant/components/backup/**
- homeassistant/components/blueprint/**
- homeassistant/components/bluetooth/**
- homeassistant/components/cloud/**
- homeassistant/components/config/**
@@ -111,7 +108,6 @@ components: &components
- homeassistant/components/tag/**
- homeassistant/components/template/**
- homeassistant/components/timer/**
- homeassistant/components/trace/**
- homeassistant/components/usb/**
- homeassistant/components/webhook/**
- homeassistant/components/websocket_api/**
@@ -124,20 +120,21 @@ tests: &tests
- pylint/**
- requirements_test_pre_commit.txt
- requirements_test.txt
- tests/*.py
- tests/auth/**
- tests/backports/**
- tests/components/diagnostics/**
- tests/common.py
- tests/components/history/**
- tests/components/logbook/**
- tests/components/recorder/**
- tests/components/repairs/**
- tests/components/sensor/**
- tests/conftest.py
- tests/hassfest/**
- tests/helpers/**
- tests/ignore_uncaught_exceptions.py
- tests/mock/**
- tests/pylint/**
- tests/scripts/**
- tests/syrupy.py
- tests/test_util/**
- tests/testing_config/**
- tests/util/**
@@ -151,7 +148,6 @@ requirements: &requirements
- homeassistant/package_constraints.txt
- requirements*.txt
- pyproject.toml
- script/licenses.py
any:
- *base_platforms

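The .core_files.yaml hunks above edit YAML anchors (&base_platforms, &components, &tests, &requirements) whose lists are reused later in the file through aliases such as *base_platforms. As a minimal illustration only, here is a PyYAML sketch with made-up paths showing how an alias resolves back to the anchored list (assumes the pyyaml package is installed):

import yaml

# Hypothetical miniature of .core_files.yaml: an anchored list reused via an alias.
doc = """
base_platforms: &base_platforms
  - homeassistant/components/binary_sensor/**
  - homeassistant/components/button/**
any:
  - *base_platforms
"""

data = yaml.safe_load(doc)
print(data["any"][0] == data["base_platforms"])  # True: the alias reuses the anchored list
print(data["any"][0][0])                         # homeassistant/components/binary_sensor/**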
.coveragerc (1744 changed lines)

File diff suppressed because it is too large.

View File

@@ -4,12 +4,7 @@
"dockerFile": "../Dockerfile.dev",
"postCreateCommand": "script/setup",
"postStartCommand": "script/bootstrap",
"containerEnv": {
"PYTHONASYNCIODEBUG": "1"
},
"features": {
"ghcr.io/devcontainers/features/github-cli:1": {}
},
"containerEnv": { "DEVCONTAINER": "1" },
// Port 5683 udp is used by Shelly integration
"appPort": ["8123:8123", "5683:5683/udp"],
"runArgs": ["-e", "GIT_EDITOR=code --wait"],
@@ -22,17 +17,12 @@
"visualstudioexptteam.vscodeintellicode",
"redhat.vscode-yaml",
"esbenp.prettier-vscode",
"GitHub.vscode-pull-request-github",
"GitHub.copilot"
"GitHub.vscode-pull-request-github"
],
// Please keep this file in sync with settings in home-assistant/.vscode/settings.default.json
"settings": {
"python.experiments.optOutFrom": ["pythonTestAdapter"],
"python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
"python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"python.pythonPath": "/usr/local/bin/python",
"python.testing.pytestArgs": ["--no-cov"],
"pylint.importStrategy": "fromEnvironment",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,

View File

@@ -1,14 +0,0 @@
# Black
4de97abc3aa83188666336ce0a015a5bab75bc8f
# Switch formatting from black to ruff-format (#102893)
706add4a57120a93d7b7fe40e722b00d634c76c2
# Prettify json (component test fixtures) (#68892)
053c4428a933c3c04c22642f93c93fccba3e8bfd
# Prettify json (tests) (#68888)
496d90bf00429d9d924caeb0155edc0bf54e86b9
# Bump ruff to 0.3.4 (#112690)
6bb4e7d62c60389608acf4a7d7dacd8f029307dd

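The .git-blame-ignore-revs hunk above lists bulk-formatting commits (Black, ruff-format, JSON prettification, a ruff bump) so that git blame can skip them. A small sketch, assuming it is run from a clone of the repository, of enabling that via the standard blame.ignoreRevsFile setting:

import subprocess

# Make git blame skip the formatting-only commits listed in .git-blame-ignore-revs.
subprocess.run(
    ["git", "config", "blame.ignoreRevsFile", ".git-blame-ignore-revs"],
    check=True,
)
# One-off alternative: git blame --ignore-revs-file .git-blame-ignore-revs <path>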
View File

@@ -74,6 +74,7 @@ If the code communicates with devices, web services, or third-party tools:
- [ ] New or updated dependencies have been added to `requirements_all.txt`.
Updated by running `python3 -m script.gen_requirements_all`.
- [ ] For the updated dependencies - a link to the changelog, or at minimum a diff between library versions is added to the PR description.
- [ ] Untested files have been added to `.coveragerc`.
<!--
This project is very active and we have a high turnover of pull requests.

View File

@@ -12,8 +12,6 @@ env:
BUILD_TYPE: core
DEFAULT_PYTHON: "3.12"
PIP_TIMEOUT: 60
UV_HTTP_TIMEOUT: 60
UV_SYSTEM_PYTHON: "true"
jobs:
init:
@@ -27,12 +25,12 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.1.2
with:
fetch-depth: 0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -51,29 +49,41 @@ jobs:
with:
ignore-dev: true
- name: Fail if translations files are checked in
run: |
if [ -n "$(find homeassistant/components/*/translations -type f)" ]; then
echo "Translations files are checked in, please remove the following files:"
find homeassistant/components/*/translations -type f
exit 1
fi
build_python:
name: Build PyPi package
environment: ${{ needs.init.outputs.channel }}
needs: ["init", "build_base"]
runs-on: ubuntu-latest
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download Translations
run: python3 -m script.translations download
env:
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
- name: Archive translations
- name: Build package
shell: bash
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
run: |
# Remove dist, build, and homeassistant.egg-info
# when build locally for testing!
pip install twine build
python -m build
- name: Upload translations
uses: actions/upload-artifact@v4.4.0
with:
name: translations
path: translations.tar.gz
if-no-files-found: error
- name: Upload package
shell: bash
run: |
export TWINE_USERNAME="__token__"
export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"
twine upload dist/* --skip-existing
build_base:
name: Build ${{ matrix.arch }} base core image
@@ -85,16 +95,15 @@ jobs:
packages: write
id-token: write
strategy:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.1.2
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v6
uses: dawidd6/action-download-artifact@v3.1.2
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/frontend
@@ -105,7 +114,7 @@ jobs:
- name: Download nightly wheels of intents
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v6
uses: dawidd6/action-download-artifact@v3.1.2
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/intents-package
@@ -116,20 +125,17 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Adjust nightly version
if: needs.init.outputs.channel == 'dev'
shell: bash
env:
UV_PRERELEASE: allow
run: |
python3 -m pip install "$(grep '^uv' < requirements.txt)"
uv pip install packaging tomli
uv pip install .
python3 script/version_bump.py nightly --set-nightly-version "${{ needs.init.outputs.version }}"
python3 -m pip install packaging tomli
python3 -m pip install .
version="$(python3 script/version_bump.py nightly)"
if [[ "$(ls home_assistant_frontend*.whl)" =~ ^home_assistant_frontend-(.*)-py3-none-any.whl$ ]]; then
echo "Found frontend wheel, setting version to: ${BASH_REMATCH[1]}"
@@ -141,7 +147,7 @@ jobs:
sed -i "s|home-assistant-frontend==.*|home-assistant-frontend==${BASH_REMATCH[1]}|" \
homeassistant/package_constraints.txt
sed -i "s|home-assistant-frontend==.*||" requirements_all.txt
python -m script.gen_requirements_all
fi
if [[ "$(ls home_assistant_intents*.whl)" =~ ^home_assistant_intents-(.*)-py3-none-any.whl$ ]]; then
@@ -159,7 +165,7 @@ jobs:
sed -i "s|home-assistant-intents==.*|home-assistant-intents==${BASH_REMATCH[1]}|" \
homeassistant/package_constraints.txt
sed -i "s|home-assistant-intents==.*||" requirements_all.txt
python -m script.gen_requirements_all
fi
- name: Adjustments for armhf
@@ -174,15 +180,19 @@ jobs:
sed -i "s|pyezviz|# pyezviz|g" requirements_all.txt
sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
- name: Download translations
uses: actions/download-artifact@v4.1.8
with:
name: translations
- name: Extract translations
- name: Adjustments for 64-bit
if: matrix.arch == 'amd64' || matrix.arch == 'aarch64'
run: |
tar xvf translations.tar.gz
rm translations.tar.gz
# Some speedups are only available on 64-bit, and since
# we build 32bit images on 64bit hosts, we only enable
# the speed ups on 64bit since the wheels for 32bit
# are not available.
sed -i "s|aiohttp-zlib-ng|aiohttp-zlib-ng\[isal\]|g" requirements_all.txt
- name: Download Translations
run: python3 -m script.translations download
env:
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
- name: Write meta info file
shell: bash
@@ -190,14 +200,14 @@ jobs:
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2024.01.0
with:
args: |
$BUILD_ARGS \
@@ -206,6 +216,17 @@ jobs:
--target /data \
--generic ${{ needs.init.outputs.version }}
- name: Archive translations
shell: bash
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
- name: Upload translations
uses: actions/upload-artifact@v3
with:
name: translations
path: translations.tar.gz
if-no-files-found: error
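The Archive translations step above bundles every downloaded translations/*.json file into translations.tar.gz with a find | tar pipeline before uploading it as an artifact. For illustration only, a rough Python equivalent of that pipeline (the workflow itself uses the shell commands shown above):

import glob
import tarfile

# Collect all component translation files and pack them into one gzip'd tarball.
files = glob.glob("homeassistant/components/*/translations/*.json")
with tarfile.open("translations.tar.gz", "w:gz") as archive:
    for path in files:
        archive.add(path)
print(f"archived {len(files)} translation files")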
build_machine:
name: Build ${{ matrix.machine }} machine core image
if: github.repository_owner == 'home-assistant'
@@ -242,7 +263,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.1.2
- name: Set build additional args
run: |
@@ -256,14 +277,14 @@ jobs:
fi
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2024.01.0
with:
args: |
$BUILD_ARGS \
@@ -279,7 +300,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.1.2
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
@@ -315,29 +336,23 @@ jobs:
contents: read
packages: write
id-token: write
strategy:
fail-fast: false
matrix:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.1.2
- name: Install Cosign
uses: sigstore/cosign-installer@v3.6.0
uses: sigstore/cosign-installer@v3.4.0
with:
cosign-release: "v2.2.3"
cosign-release: "v2.0.2"
- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.0.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: matrix.registry == 'ghcr.io/home-assistant'
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -351,37 +366,41 @@ jobs:
function create_manifest() {
local tag_l=${1}
local tag_r=${2}
local registry=${{ matrix.registry }}
docker manifest create "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
"${registry}/i386-homeassistant:${tag_r}" \
"${registry}/armhf-homeassistant:${tag_r}" \
"${registry}/armv7-homeassistant:${tag_r}" \
"${registry}/aarch64-homeassistant:${tag_r}"
for registry in "ghcr.io/home-assistant" "docker.io/homeassistant"
do
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
--os linux --arch amd64
docker manifest create "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
"${registry}/i386-homeassistant:${tag_r}" \
"${registry}/armhf-homeassistant:${tag_r}" \
"${registry}/armv7-homeassistant:${tag_r}" \
"${registry}/aarch64-homeassistant:${tag_r}"
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/i386-homeassistant:${tag_r}" \
--os linux --arch 386
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
--os linux --arch amd64
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armhf-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v6
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/i386-homeassistant:${tag_r}" \
--os linux --arch 386
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armv7-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v7
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armhf-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v6
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/aarch64-homeassistant:${tag_r}" \
--os linux --arch arm64 --variant=v8
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armv7-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v7
docker manifest push --purge "${registry}/home-assistant:${tag_l}"
cosign sign --yes "${registry}/home-assistant:${tag_l}"
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/aarch64-homeassistant:${tag_r}" \
--os linux --arch arm64 --variant=v8
docker manifest push --purge "${registry}/home-assistant:${tag_l}"
cosign sign --yes "${registry}/home-assistant:${tag_l}"
done
}
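The create_manifest function above builds one multi-arch manifest per registry: it creates the manifest from the five per-architecture images, annotates each with its OS, architecture, and variant, then pushes and signs it. A hedged Python sketch of the same docker CLI sequence for a single registry (cosign signing omitted), using only commands that appear in the workflow:

import subprocess

# --os/--arch/--variant flags per architecture, as used by the annotate calls above.
ARCH_FLAGS = {
    "amd64": ["--os", "linux", "--arch", "amd64"],
    "i386": ["--os", "linux", "--arch", "386"],
    "armhf": ["--os", "linux", "--arch", "arm", "--variant=v6"],
    "armv7": ["--os", "linux", "--arch", "arm", "--variant=v7"],
    "aarch64": ["--os", "linux", "--arch", "arm64", "--variant=v8"],
}

def create_manifest(registry: str, tag_l: str, tag_r: str) -> None:
    manifest = f"{registry}/home-assistant:{tag_l}"
    images = [f"{registry}/{arch}-homeassistant:{tag_r}" for arch in ARCH_FLAGS]
    subprocess.run(["docker", "manifest", "create", manifest, *images], check=True)
    for arch, flags in ARCH_FLAGS.items():
        subprocess.run(
            ["docker", "manifest", "annotate", manifest,
             f"{registry}/{arch}-homeassistant:{tag_r}", *flags],
            check=True,
        )
    subprocess.run(["docker", "manifest", "push", "--purge", manifest], check=True)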
function validate_image() {
@@ -414,14 +433,12 @@ jobs:
validate_image "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
if [[ "${{ matrix.registry }}" == "docker.io/homeassistant" ]]; then
# Upload images to dockerhub
push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "i386-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armhf-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armv7-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
fi
# Upload images to dockerhub
push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "i386-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armhf-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armv7-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
# Create version tag
create_manifest "${{ needs.init.outputs.version }}" "${{ needs.init.outputs.version }}"
@@ -442,97 +459,3 @@ jobs:
v="${{ needs.init.outputs.version }}"
create_manifest "${v%.*}" "${{ needs.init.outputs.version }}"
fi
build_python:
name: Build PyPi package
environment: ${{ needs.init.outputs.channel }}
needs: ["init", "build_base"]
runs-on: ubuntu-latest
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download translations
uses: actions/download-artifact@v4.1.8
with:
name: translations
- name: Extract translations
run: |
tar xvf translations.tar.gz
rm translations.tar.gz
- name: Build package
shell: bash
run: |
# Remove dist, build, and homeassistant.egg-info
# when build locally for testing!
pip install twine build
python -m build
- name: Upload package
shell: bash
run: |
export TWINE_USERNAME="__token__"
export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"
twine upload dist/* --skip-existing
hassfest-image:
name: Build and test hassfest image
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
attestations: write
id-token: write
needs: ["init"]
if: github.repository_owner == 'home-assistant'
env:
HASSFEST_IMAGE_NAME: ghcr.io/home-assistant/hassfest
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Login to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
load: true
tags: ${{ env.HASSFEST_IMAGE_TAG }}
- name: Run hassfest against core
run: docker run --rm -v ${{ github.workspace }}/homeassistant:/github/workspace/homeassistant ${{ env.HASSFEST_IMAGE_TAG }} --core-integrations-path=/github/workspace/homeassistant/components
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
push: true
tags: ${{ env.HASSFEST_IMAGE_TAG }},${{ env.HASSFEST_IMAGE_NAME }}:latest
- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true

File diff suppressed because it is too large.

View File

@@ -21,14 +21,14 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.1.2
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.26.9
uses: github/codeql-action/init@v3.24.7
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.26.9
uses: github/codeql-action/analyze@v3.24.7
with:
category: "/language:python"

View File

@@ -10,7 +10,7 @@ on:
- "**strings.json"
env:
DEFAULT_PYTHON: "3.12"
DEFAULT_PYTHON: "3.11"
jobs:
upload:
@@ -19,10 +19,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

View File

@@ -14,10 +14,6 @@ on:
- "homeassistant/package_constraints.txt"
- "requirements_all.txt"
- "requirements.txt"
- "script/gen_requirements_all.py"
env:
DEFAULT_PYTHON: "3.12"
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name}}
@@ -32,22 +28,7 @@ jobs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Create Python virtual environment
run: |
python -m venv venv
. venv/bin/activate
python --version
pip install "$(grep '^uv' < requirements.txt)"
uv pip install -r requirements.txt
uses: actions/checkout@v4.1.2
- name: Get information
id: info
@@ -82,31 +63,19 @@ jobs:
) > .env_file
- name: Upload env_file
uses: actions/upload-artifact@v4.4.0
uses: actions/upload-artifact@v4.3.1
with:
name: env_file
path: ./.env_file
include-hidden-files: true
overwrite: true
- name: Upload requirements_diff
uses: actions/upload-artifact@v4.4.0
uses: actions/upload-artifact@v4.3.1
with:
name: requirements_diff
path: ./requirements_diff.txt
overwrite: true
- name: Generate requirements
run: |
. venv/bin/activate
python -m script.gen_requirements_all ci
- name: Upload requirements_all_wheels
uses: actions/upload-artifact@v4.4.0
with:
name: requirements_all_wheels
path: ./requirements_all_wheels_*.txt
core:
name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2)
if: github.repository_owner == 'home-assistant'
@@ -119,26 +88,20 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.1.2
- name: Download env_file
uses: actions/download-artifact@v4.1.8
uses: actions/download-artifact@v4.1.4
with:
name: env_file
- name: Download requirements_diff
uses: actions/download-artifact@v4.1.8
uses: actions/download-artifact@v4.1.4
with:
name: requirements_diff
- name: Adjust build env
run: |
# Don't build wheels for uv as uv requires a greater version of rust as currently available on alpine
sed -i "/uv/d" requirements.txt
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.01.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -146,7 +109,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "libffi-dev;openssl-dev;yaml-dev;nasm"
skip-binary: aiohttp;multidict;yarl
skip-binary: aiohttp
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements.txt"
@@ -163,42 +126,57 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.1.2
- name: Download env_file
uses: actions/download-artifact@v4.1.8
uses: actions/download-artifact@v4.1.4
with:
name: env_file
- name: Download requirements_diff
uses: actions/download-artifact@v4.1.8
uses: actions/download-artifact@v4.1.4
with:
name: requirements_diff
- name: Download requirements_all_wheels
uses: actions/download-artifact@v4.1.8
with:
name: requirements_all_wheels
- name: Adjust build env
- name: (Un)comment packages
run: |
if [ "${{ matrix.arch }}" = "i386" ]; then
echo "NPY_DISABLE_SVML=1" >> .env_file
fi
requirement_files="requirements_all.txt requirements_diff.txt"
for requirement_file in ${requirement_files}; do
sed -i "s|# fritzconnection|fritzconnection|g" ${requirement_file}
sed -i "s|# pyuserinput|pyuserinput|g" ${requirement_file}
sed -i "s|# evdev|evdev|g" ${requirement_file}
sed -i "s|# pycups|pycups|g" ${requirement_file}
sed -i "s|# homekit|homekit|g" ${requirement_file}
sed -i "s|# decora-wifi|decora-wifi|g" ${requirement_file}
sed -i "s|# python-gammu|python-gammu|g" ${requirement_file}
# Do not pin numpy in wheels building
sed -i "/numpy/d" homeassistant/package_constraints.txt
# Don't build wheels for uv as uv requires a greater version of rust as currently available on alpine
sed -i "/uv/d" requirements.txt
sed -i "/uv/d" requirements_diff.txt
# Some packages are not buildable on armhf anymore
if [ "${{ matrix.arch }}" = "armhf" ]; then
# Pandas has issues building on armhf, it is expected they
# will drop the platform in the near future (they consider it
# "flimsy" on 386). The following packages depend on pandas,
# so we comment them out.
sed -i "s|env-canada|# env-canada|g" ${requirement_file}
sed -i "s|noaa-coops|# noaa-coops|g" ${requirement_file}
sed -i "s|pyezviz|# pyezviz|g" ${requirement_file}
sed -i "s|pykrakenapi|# pykrakenapi|g" ${requirement_file}
fi
# Some speedups are only for 64-bit
if [ "${{ matrix.arch }}" = "amd64" ] || [ "${{ matrix.arch }}" = "aarch64" ]; then
sed -i "s|aiohttp-zlib-ng|aiohttp-zlib-ng\[isal\]|g" ${requirement_file}
fi
done
- name: Split requirements all
run: |
# We split requirements all into multiple files.
# We split requirements all into two different files.
# This is to prevent the build from running out of memory when
# resolving packages on 32-bits systems (like armhf, armv7).
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all.txt requirements_all.txt
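The step above splits the full requirements list into chunk files (requirements_all.txtaa, ...ab, ...ac, consumed by the three build steps further down) so that 32-bit builders (armhf, armv7) do not run out of memory while resolving packages. For illustration, a rough Python equivalent of that split into roughly equal chunks (the workflow uses the split command shown above):

import math
from pathlib import Path

# Write requirements_all.txt out as three roughly equal chunks, mirroring
# split -l <lines/3> requirements_all.txt requirements_all.txt
lines = Path("requirements_all.txt").read_text().splitlines(keepends=True)
chunk = math.ceil(len(lines) / 3)
for index, suffix in enumerate(("aa", "ab", "ac")):
    part = lines[index * chunk:(index + 1) * chunk]
    Path(f"requirements_all.txt{suffix}").write_text("".join(part))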
- name: Create requirements for cython<3
run: |
@@ -212,8 +190,17 @@ jobs:
cat homeassistant/package_constraints.txt | grep 'grpcio==' >> requirements_old-cython.txt
cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt
- name: Adjust build env
run: |
if [ "${{ matrix.arch }}" = "i386" ]; then
echo "NPY_DISABLE_SVML=1" >> .env_file
fi
# Do not pin numpy in wheels building
sed -i "/numpy/d" homeassistant/package_constraints.txt
- name: Build wheels (old cython)
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.01.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -221,14 +208,14 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_old-cython.txt"
pip: "'cython<3'"
- name: Build wheels (part 1)
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.01.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -236,13 +223,13 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtaa"
- name: Build wheels (part 2)
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.01.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -250,13 +237,13 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtab"
- name: Build wheels (part 3)
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.01.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -264,7 +251,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtac"

.gitignore (vendored, 4 changed lines)
View File

@@ -34,7 +34,6 @@ Icon
# GITHUB Proposed Python stuff:
*.py[cod]
__pycache__
# C extensions
*.so
@@ -133,6 +132,3 @@ tmp_cache
# python-language-server / Rope
.ropeproject
# Will be created from script/split_tests.py
pytest_buckets.txt

View File

@@ -1,21 +1,21 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.6.6
rev: v0.2.1
hooks:
- id: ruff
args:
- --fix
- id: ruff-format
files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.(py|pyi)$
files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.py$
- repo: https://github.com/codespell-project/codespell
rev: v2.3.0
rev: v2.2.2
hooks:
- id: codespell
args:
- --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
- --ignore-words-list=additionals,alle,alot,bund,currenty,datas,farenheit,falsy,fo,haa,hass,iif,incomfort,ines,ist,nam,nd,pres,pullrequests,resset,rime,ser,serie,te,technik,ue,unsecure,withing,zar
- --skip="./.*,*.csv,*.json,*.ambr"
- --quiet-level=2
exclude_types: [csv, json, html]
exclude_types: [csv, json]
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
@@ -30,7 +30,7 @@ repos:
- --branch=master
- --branch=rc
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.35.1
rev: v1.32.0
hooks:
- id: yamllint
- repo: https://github.com/pre-commit/mirrors-prettier
@@ -61,15 +61,15 @@ repos:
name: mypy
entry: script/run-in-env.sh mypy
language: script
types_or: [python, pyi]
types: [python]
require_serial: true
files: ^(homeassistant|pylint)/.+\.(py|pyi)$
files: ^(homeassistant|pylint)/.+\.py$
- id: pylint
name: pylint
entry: script/run-in-env.sh pylint -j 0 --ignore-missing-annotations=y
language: script
types_or: [python, pyi]
files: ^(homeassistant|tests)/.+\.(py|pyi)$
types: [python]
files: ^homeassistant/.+\.py$
- id: gen_requirements_all
name: gen_requirements_all
entry: script/run-in-env.sh python3 -m script.gen_requirements_all
@@ -83,7 +83,7 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements\.txt)$
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|\.coveragerc|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py)$
- id: hassfest-metadata
name: hassfest-metadata
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata

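The ruff hook in the hunk above is limited by a files: regular expression; the broader variant shown, ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.(py|pyi)$, covers .py and .pyi files at the repository root or under those four directories. A quick check of that expression against a few illustrative paths (the paths are examples, not taken from the diff):

import re

PATTERN = re.compile(r"^((homeassistant|pylint|script|tests)/.+)?[^/]+\.(py|pyi)$")

for path in (
    "homeassistant/core.py",                        # matches
    "tests/components/demo/test_init.py",           # matches
    "setup.py",                                     # repository-root files match too
    "docs/example.py",                              # other directories do not match
    "homeassistant/components/demo/strings.json",   # non-Python files do not match
):
    print(f"{path}: {bool(PATTERN.match(path))}")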
View File

@@ -21,7 +21,6 @@ homeassistant.helpers.entity_platform
homeassistant.helpers.entity_values
homeassistant.helpers.event
homeassistant.helpers.reload
homeassistant.helpers.script
homeassistant.helpers.script_variables
homeassistant.helpers.singleton
homeassistant.helpers.sun
@@ -49,7 +48,6 @@ homeassistant.components.adax.*
homeassistant.components.adguard.*
homeassistant.components.aftership.*
homeassistant.components.air_quality.*
homeassistant.components.airgradient.*
homeassistant.components.airly.*
homeassistant.components.airnow.*
homeassistant.components.airq.*
@@ -67,7 +65,7 @@ homeassistant.components.alexa.*
homeassistant.components.alpha_vantage.*
homeassistant.components.amazon_polly.*
homeassistant.components.amberelectric.*
homeassistant.components.ambient_network.*
homeassistant.components.ambiclimate.*
homeassistant.components.ambient_station.*
homeassistant.components.amcrest.*
homeassistant.components.ampio.*
@@ -85,7 +83,6 @@ homeassistant.components.api.*
homeassistant.components.apple_tv.*
homeassistant.components.apprise.*
homeassistant.components.aprs.*
homeassistant.components.apsystems.*
homeassistant.components.aqualogic.*
homeassistant.components.aquostv.*
homeassistant.components.aranet.*
@@ -95,9 +92,9 @@ homeassistant.components.aruba.*
homeassistant.components.arwn.*
homeassistant.components.aseko_pool_live.*
homeassistant.components.assist_pipeline.*
homeassistant.components.assist_satellite.*
homeassistant.components.asterisk_cdr.*
homeassistant.components.asterisk_mbox.*
homeassistant.components.asuswrt.*
homeassistant.components.autarco.*
homeassistant.components.auth.*
homeassistant.components.automation.*
homeassistant.components.awair.*
@@ -111,7 +108,6 @@ homeassistant.components.bitcoin.*
homeassistant.components.blockchain.*
homeassistant.components.blue_current.*
homeassistant.components.blueprint.*
homeassistant.components.bluesound.*
homeassistant.components.bluetooth.*
homeassistant.components.bluetooth_adapters.*
homeassistant.components.bluetooth_tracker.*
@@ -120,7 +116,6 @@ homeassistant.components.bond.*
homeassistant.components.braviatv.*
homeassistant.components.brother.*
homeassistant.components.browser.*
homeassistant.components.bryant_evolution.*
homeassistant.components.bthome.*
homeassistant.components.button.*
homeassistant.components.calendar.*
@@ -141,7 +136,6 @@ homeassistant.components.cpuspeed.*
homeassistant.components.crownstone.*
homeassistant.components.date.*
homeassistant.components.datetime.*
homeassistant.components.deako.*
homeassistant.components.deconz.*
homeassistant.components.default_config.*
homeassistant.components.demo.*
@@ -169,16 +163,13 @@ homeassistant.components.ecowitt.*
homeassistant.components.efergy.*
homeassistant.components.electrasmart.*
homeassistant.components.electric_kiwi.*
homeassistant.components.elevenlabs.*
homeassistant.components.elgato.*
homeassistant.components.elkm1.*
homeassistant.components.emulated_hue.*
homeassistant.components.energenie_power_sockets.*
homeassistant.components.energy.*
homeassistant.components.energyzero.*
homeassistant.components.enigma2.*
homeassistant.components.enphase_envoy.*
homeassistant.components.eq3btsmart.*
homeassistant.components.esphome.*
homeassistant.components.event.*
homeassistant.components.evil_genius_labs.*
@@ -199,9 +190,7 @@ homeassistant.components.fritzbox.*
homeassistant.components.fritzbox_callmonitor.*
homeassistant.components.fronius.*
homeassistant.components.frontend.*
homeassistant.components.fujitsu_fglair.*
homeassistant.components.fully_kiosk.*
homeassistant.components.fyta.*
homeassistant.components.generic_hygrostat.*
homeassistant.components.generic_thermostat.*
homeassistant.components.geo_location.*
@@ -211,8 +200,6 @@ homeassistant.components.glances.*
homeassistant.components.goalzero.*
homeassistant.components.google.*
homeassistant.components.google_assistant_sdk.*
homeassistant.components.google_cloud.*
homeassistant.components.google_photos.*
homeassistant.components.google_sheets.*
homeassistant.components.gpsd.*
homeassistant.components.greeneye_monitor.*
@@ -245,7 +232,6 @@ homeassistant.components.homeworks.*
homeassistant.components.http.*
homeassistant.components.huawei_lte.*
homeassistant.components.humidifier.*
homeassistant.components.husqvarna_automower.*
homeassistant.components.hydrawise.*
homeassistant.components.hyperion.*
homeassistant.components.ibeacon.*
@@ -254,7 +240,6 @@ homeassistant.components.image.*
homeassistant.components.image_processing.*
homeassistant.components.image_upload.*
homeassistant.components.imap.*
homeassistant.components.imgw_pib.*
homeassistant.components.input_button.*
homeassistant.components.input_select.*
homeassistant.components.input_text.*
@@ -262,7 +247,6 @@ homeassistant.components.integration.*
homeassistant.components.intent.*
homeassistant.components.intent_script.*
homeassistant.components.ios.*
homeassistant.components.iotty.*
homeassistant.components.ipp.*
homeassistant.components.iqvia.*
homeassistant.components.islamic_prayer_times.*
@@ -271,7 +255,6 @@ homeassistant.components.jellyfin.*
homeassistant.components.jewish_calendar.*
homeassistant.components.jvc_projector.*
homeassistant.components.kaleidescape.*
homeassistant.components.knocki.*
homeassistant.components.knx.*
homeassistant.components.kraken.*
homeassistant.components.lacrosse.*
@@ -283,12 +266,10 @@ homeassistant.components.lawn_mower.*
homeassistant.components.lcn.*
homeassistant.components.ld2410_ble.*
homeassistant.components.led_ble.*
homeassistant.components.lektrico.*
homeassistant.components.lidarr.*
homeassistant.components.lifx.*
homeassistant.components.light.*
homeassistant.components.linear_garage_door.*
homeassistant.components.linkplay.*
homeassistant.components.litejet.*
homeassistant.components.litterrobot.*
homeassistant.components.local_ip.*
@@ -299,8 +280,7 @@ homeassistant.components.logger.*
homeassistant.components.london_underground.*
homeassistant.components.lookin.*
homeassistant.components.luftdaten.*
homeassistant.components.madvr.*
homeassistant.components.manual.*
homeassistant.components.mailbox.*
homeassistant.components.map.*
homeassistant.components.mastodon.*
homeassistant.components.matrix.*
@@ -316,8 +296,6 @@ homeassistant.components.minecraft_server.*
homeassistant.components.mjpeg.*
homeassistant.components.modbus.*
homeassistant.components.modem_callerid.*
homeassistant.components.mold_indicator.*
homeassistant.components.monzo.*
homeassistant.components.moon.*
homeassistant.components.mopeka.*
homeassistant.components.motionmount.*
@@ -343,7 +321,6 @@ homeassistant.components.nut.*
homeassistant.components.onboarding.*
homeassistant.components.oncue.*
homeassistant.components.onewire.*
homeassistant.components.onkyo.*
homeassistant.components.open_meteo.*
homeassistant.components.openexchangerates.*
homeassistant.components.opensky.*
@@ -357,6 +334,7 @@ homeassistant.components.persistent_notification.*
homeassistant.components.pi_hole.*
homeassistant.components.ping.*
homeassistant.components.plugwise.*
homeassistant.components.poolsense.*
homeassistant.components.powerwall.*
homeassistant.components.private_ble_device.*
homeassistant.components.prometheus.*
@@ -383,9 +361,7 @@ homeassistant.components.rest_command.*
homeassistant.components.rfxtrx.*
homeassistant.components.rhasspy.*
homeassistant.components.ridwell.*
homeassistant.components.ring.*
homeassistant.components.rituals_perfume_genie.*
homeassistant.components.roborock.*
homeassistant.components.roku.*
homeassistant.components.romy.*
homeassistant.components.rpi_power.*
@@ -397,13 +373,11 @@ homeassistant.components.samsungtv.*
homeassistant.components.scene.*
homeassistant.components.schedule.*
homeassistant.components.scrape.*
homeassistant.components.script.*
homeassistant.components.search.*
homeassistant.components.select.*
homeassistant.components.sensibo.*
homeassistant.components.sensirion_ble.*
homeassistant.components.sensor.*
homeassistant.components.sensoterra.*
homeassistant.components.senz.*
homeassistant.components.sfr_box.*
homeassistant.components.shelly.*
@@ -415,13 +389,10 @@ homeassistant.components.skybell.*
homeassistant.components.slack.*
homeassistant.components.sleepiq.*
homeassistant.components.smhi.*
homeassistant.components.smlight.*
homeassistant.components.snooz.*
homeassistant.components.solarlog.*
homeassistant.components.sonarr.*
homeassistant.components.speedtestdotnet.*
homeassistant.components.sql.*
homeassistant.components.squeezebox.*
homeassistant.components.ssdp.*
homeassistant.components.starlink.*
homeassistant.components.statistics.*
@@ -450,7 +421,6 @@ homeassistant.components.tcp.*
homeassistant.components.technove.*
homeassistant.components.tedee.*
homeassistant.components.text.*
homeassistant.components.thethingsnetwork.*
homeassistant.components.threshold.*
homeassistant.components.tibber.*
homeassistant.components.tile.*
@@ -481,7 +451,6 @@ homeassistant.components.update.*
homeassistant.components.uptime.*
homeassistant.components.uptimerobot.*
homeassistant.components.usb.*
homeassistant.components.uvc.*
homeassistant.components.vacuum.*
homeassistant.components.vallox.*
homeassistant.components.valve.*

.vscode/launch.json (vendored, 42 changed lines)
View File

@@ -6,52 +6,38 @@
"configurations": [
{
"name": "Home Assistant",
"type": "debugpy",
"type": "python",
"request": "launch",
"module": "homeassistant",
"justMyCode": false,
"args": [
"--debug",
"-c",
"config"
],
"args": ["--debug", "-c", "config"],
"preLaunchTask": "Compile English translations"
},
{
"name": "Home Assistant (skip pip)",
"type": "debugpy",
"type": "python",
"request": "launch",
"module": "homeassistant",
"justMyCode": false,
"args": [
"--debug",
"-c",
"config",
"--skip-pip"
],
"args": ["--debug", "-c", "config", "--skip-pip"],
"preLaunchTask": "Compile English translations"
},
{
"name": "Home Assistant: Changed tests",
"type": "debugpy",
"type": "python",
"request": "launch",
"module": "pytest",
"justMyCode": false,
"args": [
"--timeout=10",
"--picked"
],
"args": ["--timeout=10", "--picked"],
},
{
// Debug by attaching to local Home Assistant server using Remote Python Debugger.
// See https://www.home-assistant.io/integrations/debugpy/
"name": "Home Assistant: Attach Local",
"type": "debugpy",
"type": "python",
"request": "attach",
"connect": {
"port": 5678,
"host": "localhost"
},
"port": 5678,
"host": "localhost",
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
@@ -63,12 +49,10 @@
// Debug by attaching to remote Home Assistant server using Remote Python Debugger.
// See https://www.home-assistant.io/integrations/debugpy/
"name": "Home Assistant: Attach Remote",
"type": "debugpy",
"type": "python",
"request": "attach",
"connect": {
"port": 5678,
"host": "homeassistant.local"
},
"port": 5678,
"host": "homeassistant.local",
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
@@ -77,4 +61,4 @@
]
}
]
}
}

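The two Attach configurations above connect the VS Code debugger to a running Home Assistant instance on port 5678 (locally or at homeassistant.local), as exposed by the debugpy integration referenced in the comments. A minimal, standalone sketch of the server side such a configuration attaches to (assumes the debugpy package; this is not the integration's actual code):

import debugpy

# Listen where the "Home Assistant: Attach Local" configuration expects a debugger.
debugpy.listen(("0.0.0.0", 5678))
print("Waiting for VS Code to attach on port 5678...")
debugpy.wait_for_client()
debugpy.breakpoint()  # pauses here once a client is attached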
View File

@@ -4,7 +4,5 @@
// https://github.com/microsoft/vscode-python/issues/14067
"python.testing.pytestArgs": ["--no-cov"],
// https://code.visualstudio.com/docs/python/testing#_pytest-configuration-settings
"python.testing.pytestEnabled": false,
// https://code.visualstudio.com/docs/python/linting#_general-settings
"pylint.importStrategy": "fromEnvironment"
"python.testing.pytestEnabled": false
}

.vscode/tasks.json (vendored, 5 changed lines)
View File

@@ -76,7 +76,6 @@
"detail": "Generate code coverage report for a given integration.",
"type": "shell",
"command": "python3 -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto",
"dependsOn": ["Compile English translations"],
"group": {
"kind": "test",
"isDefault": true
@@ -104,7 +103,7 @@
{
"label": "Install all Requirements",
"type": "shell",
"command": "uv pip install -r requirements_all.txt",
"command": "pip3 install -r requirements_all.txt",
"group": {
"kind": "build",
"isDefault": true
@@ -118,7 +117,7 @@
{
"label": "Install all Test Requirements",
"type": "shell",
"command": "uv pip install -r requirements_test_all.txt",
"command": "pip3 install -r requirements_test_all.txt",
"group": {
"kind": "build",
"isDefault": true

View File

@@ -5,30 +5,13 @@
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners
# Home Assistant Core
.core_files.yaml @home-assistant/core
.git-blame-ignore-revs @home-assistant/core
.gitattributes @home-assistant/core
.gitignore @home-assistant/core
.hadolint.yaml @home-assistant/core
.pre-commit-config.yaml @home-assistant/core
.prettierignore @home-assistant/core
.yamllint @home-assistant/core
setup.cfg @home-assistant/core
pyproject.toml @home-assistant/core
requirements_test.txt @home-assistant/core
/.devcontainer/ @home-assistant/core
/.github/ @home-assistant/core
/.vscode/ @home-assistant/core
/homeassistant/*.py @home-assistant/core
/homeassistant/auth/ @home-assistant/core
/homeassistant/backports/ @home-assistant/core
/homeassistant/helpers/ @home-assistant/core
/homeassistant/scripts/ @home-assistant/core
/homeassistant/util/ @home-assistant/core
/pylint/ @home-assistant/core
/script/ @home-assistant/core
# Home Assistant Supervisor
.dockerignore @home-assistant/supervisor
build.json @home-assistant/supervisor
/machine/ @home-assistant/supervisor
/rootfs/ @home-assistant/supervisor
@@ -48,7 +31,6 @@ build.json @home-assistant/supervisor
/tests/components/adax/ @danielhiversen
/homeassistant/components/adguard/ @frenck
/tests/components/adguard/ @frenck
/homeassistant/components/ads/ @mrpasztoradam
/homeassistant/components/advantage_air/ @Bre77
/tests/components/advantage_air/ @Bre77
/homeassistant/components/aemet/ @Noltari
@@ -57,8 +39,6 @@ build.json @home-assistant/supervisor
/tests/components/agent_dvr/ @ispysoftware
/homeassistant/components/air_quality/ @home-assistant/core
/tests/components/air_quality/ @home-assistant/core
/homeassistant/components/airgradient/ @airgradienthq @joostlek
/tests/components/airgradient/ @airgradienthq @joostlek
/homeassistant/components/airly/ @bieniu
/tests/components/airly/ @bieniu
/homeassistant/components/airnow/ @asymworks
@@ -81,17 +61,18 @@ build.json @home-assistant/supervisor
/tests/components/airzone/ @Noltari
/homeassistant/components/airzone_cloud/ @Noltari
/tests/components/airzone_cloud/ @Noltari
/homeassistant/components/aladdin_connect/ @mkmer
/tests/components/aladdin_connect/ @mkmer
/homeassistant/components/alarm_control_panel/ @home-assistant/core
/tests/components/alarm_control_panel/ @home-assistant/core
/homeassistant/components/alert/ @home-assistant/core @frenck
/tests/components/alert/ @home-assistant/core @frenck
/homeassistant/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
/tests/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
/homeassistant/components/amazon_polly/ @jschlyter
/homeassistant/components/amberelectric/ @madpilot
/tests/components/amberelectric/ @madpilot
/homeassistant/components/ambient_network/ @thomaskistler
/tests/components/ambient_network/ @thomaskistler
/homeassistant/components/ambiclimate/ @danielhiversen
/tests/components/ambiclimate/ @danielhiversen
/homeassistant/components/ambient_station/ @bachya
/tests/components/ambient_station/ @bachya
/homeassistant/components/amcrest/ @flacjacket
@@ -109,8 +90,6 @@ build.json @home-assistant/supervisor
/tests/components/anova/ @Lash-L
/homeassistant/components/anthemav/ @hyralex
/tests/components/anthemav/ @hyralex
/homeassistant/components/anthropic/ @Shulyaka
/tests/components/anthropic/ @Shulyaka
/homeassistant/components/aosmith/ @bdr99
/tests/components/aosmith/ @bdr99
/homeassistant/components/apache_kafka/ @bachya
@@ -129,23 +108,15 @@ build.json @home-assistant/supervisor
/tests/components/aprilaire/ @chamberlain2007
/homeassistant/components/aprs/ @PhilRW
/tests/components/aprs/ @PhilRW
/homeassistant/components/apsystems/ @mawoka-myblock @SonnenladenGmbH
/tests/components/apsystems/ @mawoka-myblock @SonnenladenGmbH
/homeassistant/components/aquacell/ @Jordi1990
/tests/components/aquacell/ @Jordi1990
/homeassistant/components/aranet/ @aschmitz @thecode @anrijs
/tests/components/aranet/ @aschmitz @thecode @anrijs
/homeassistant/components/aranet/ @aschmitz @thecode
/tests/components/aranet/ @aschmitz @thecode
/homeassistant/components/arcam_fmj/ @elupus
/tests/components/arcam_fmj/ @elupus
/homeassistant/components/arris_tg2492lg/ @vanbalken
/homeassistant/components/arve/ @ikalnyi
/tests/components/arve/ @ikalnyi
/homeassistant/components/aseko_pool_live/ @milanmeu
/tests/components/aseko_pool_live/ @milanmeu
/homeassistant/components/assist_pipeline/ @balloob @synesthesiam
/tests/components/assist_pipeline/ @balloob @synesthesiam
/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
/tests/components/assist_satellite/ @home-assistant/core @synesthesiam
/homeassistant/components/asuswrt/ @kennedyshead @ollo69
/tests/components/asuswrt/ @kennedyshead @ollo69
/homeassistant/components/atag/ @MatsNL
@@ -160,8 +131,6 @@ build.json @home-assistant/supervisor
/tests/components/aurora_abb_powerone/ @davet2001
/homeassistant/components/aussie_broadband/ @nickw444 @Bre77
/tests/components/aussie_broadband/ @nickw444 @Bre77
/homeassistant/components/autarco/ @klaasnicolaas
/tests/components/autarco/ @klaasnicolaas
/homeassistant/components/auth/ @home-assistant/core
/tests/components/auth/ @home-assistant/core
/homeassistant/components/automation/ @home-assistant/core
@@ -171,8 +140,6 @@ build.json @home-assistant/supervisor
/tests/components/awair/ @ahayworth @danielsjf
/homeassistant/components/axis/ @Kane610
/tests/components/axis/ @Kane610
/homeassistant/components/azure_data_explorer/ @kaareseras
/tests/components/azure_data_explorer/ @kaareseras
/homeassistant/components/azure_devops/ @timmo001
/tests/components/azure_devops/ @timmo001
/homeassistant/components/azure_event_hub/ @eavanvalkenburg
@@ -192,8 +159,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/binary_sensor/ @home-assistant/core
/tests/components/binary_sensor/ @home-assistant/core
/homeassistant/components/bizkaibus/ @UgaitzEtxebarria
/homeassistant/components/blebox/ @bbx-a @swistakm
/tests/components/blebox/ @bbx-a @swistakm
/homeassistant/components/blebox/ @bbx-a @riokuu @swistakm
/tests/components/blebox/ @bbx-a @riokuu @swistakm
/homeassistant/components/blink/ @fronzbot @mkmer
/tests/components/blink/ @fronzbot @mkmer
/homeassistant/components/blue_current/ @Floris272 @gleeuwen
@@ -202,8 +169,7 @@ build.json @home-assistant/supervisor
/tests/components/bluemaestro/ @bdraco
/homeassistant/components/blueprint/ @home-assistant/core
/tests/components/blueprint/ @home-assistant/core
/homeassistant/components/bluesound/ @thrawnarn @LouisChrist
/tests/components/bluesound/ @thrawnarn @LouisChrist
/homeassistant/components/bluesound/ @thrawnarn
/homeassistant/components/bluetooth/ @bdraco
/tests/components/bluetooth/ @bdraco
/homeassistant/components/bluetooth_adapters/ @bdraco
@@ -226,21 +192,17 @@ build.json @home-assistant/supervisor
/tests/components/brottsplatskartan/ @gjohansson-ST
/homeassistant/components/brunt/ @eavanvalkenburg
/tests/components/brunt/ @eavanvalkenburg
/homeassistant/components/bryant_evolution/ @danielsmyers
/tests/components/bryant_evolution/ @danielsmyers
/homeassistant/components/bsblan/ @liudger
/tests/components/bsblan/ @liudger
/homeassistant/components/bt_smarthub/ @typhoon2099
/homeassistant/components/bthome/ @Ernst79 @thecode
/tests/components/bthome/ @Ernst79 @thecode
/homeassistant/components/bthome/ @Ernst79
/tests/components/bthome/ @Ernst79
/homeassistant/components/buienradar/ @mjj4791 @ties @Robbie1221
/tests/components/buienradar/ @mjj4791 @ties @Robbie1221
/homeassistant/components/button/ @home-assistant/core
/tests/components/button/ @home-assistant/core
/homeassistant/components/calendar/ @home-assistant/core
/tests/components/calendar/ @home-assistant/core
/homeassistant/components/cambridge_audio/ @noahhusby
/tests/components/cambridge_audio/ @noahhusby
/homeassistant/components/camera/ @home-assistant/core
/tests/components/camera/ @home-assistant/core
/homeassistant/components/cast/ @emontnemery
@@ -249,8 +211,7 @@ build.json @home-assistant/supervisor
/tests/components/ccm15/ @ocalvo
/homeassistant/components/cert_expiry/ @jjlawren
/tests/components/cert_expiry/ @jjlawren
/homeassistant/components/chacon_dio/ @cnico
/tests/components/chacon_dio/ @cnico
/homeassistant/components/circuit/ @braam
/homeassistant/components/cisco_ios/ @fbradyirl
/homeassistant/components/cisco_mobility_express/ @fbradyirl
/homeassistant/components/cisco_webex_teams/ @fbradyirl
@@ -299,8 +260,6 @@ build.json @home-assistant/supervisor
/tests/components/date/ @home-assistant/core
/homeassistant/components/datetime/ @home-assistant/core
/tests/components/datetime/ @home-assistant/core
/homeassistant/components/deako/ @sebirdman @balake @deakolights
/tests/components/deako/ @sebirdman @balake @deakolights
/homeassistant/components/debugpy/ @frenck
/tests/components/debugpy/ @frenck
/homeassistant/components/deconz/ @Kane610
@@ -340,8 +299,8 @@ build.json @home-assistant/supervisor
/tests/components/discovergy/ @jpbede
/homeassistant/components/dlink/ @tkdrob
/tests/components/dlink/ @tkdrob
/homeassistant/components/dlna_dmr/ @chishm
/tests/components/dlna_dmr/ @chishm
/homeassistant/components/dlna_dmr/ @StevenLooman @chishm
/tests/components/dlna_dmr/ @StevenLooman @chishm
/homeassistant/components/dlna_dms/ @chishm
/tests/components/dlna_dms/ @chishm
/homeassistant/components/dnsip/ @gjohansson-ST
@@ -350,18 +309,14 @@ build.json @home-assistant/supervisor
/tests/components/doorbird/ @oblogic7 @bdraco @flacjacket
/homeassistant/components/dormakaba_dkey/ @emontnemery
/tests/components/dormakaba_dkey/ @emontnemery
/homeassistant/components/downloader/ @erwindouna
/tests/components/downloader/ @erwindouna
/homeassistant/components/dremel_3d_printer/ @tkdrob
/tests/components/dremel_3d_printer/ @tkdrob
/homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer
/tests/components/drop_connect/ @ChandlerSystems @pfrazer
/homeassistant/components/dsmr/ @Robbie1221
/tests/components/dsmr/ @Robbie1221
/homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/homeassistant/components/duke_energy/ @hunterjm
/tests/components/duke_energy/ @hunterjm
/homeassistant/components/dsmr/ @Robbie1221 @frenck
/tests/components/dsmr/ @Robbie1221 @frenck
/homeassistant/components/dsmr_reader/ @sorted-bits @glodenox
/tests/components/dsmr_reader/ @sorted-bits @glodenox
/homeassistant/components/duotecno/ @cereal2nd
/tests/components/duotecno/ @cereal2nd
/homeassistant/components/dwd_weather_warnings/ @runningman84 @stephan192 @andarotajo
@@ -376,8 +331,8 @@ build.json @home-assistant/supervisor
/tests/components/ecoforest/ @pjanuario
/homeassistant/components/econet/ @w1ll1am23
/tests/components/econet/ @w1ll1am23
/homeassistant/components/ecovacs/ @mib1185 @edenhaus @Augar
/tests/components/ecovacs/ @mib1185 @edenhaus @Augar
/homeassistant/components/ecovacs/ @OverloadUT @mib1185 @edenhaus @Augar
/tests/components/ecovacs/ @OverloadUT @mib1185 @edenhaus @Augar
/homeassistant/components/ecowitt/ @pvizeli
/tests/components/ecowitt/ @pvizeli
/homeassistant/components/efergy/ @tkdrob
@@ -387,8 +342,6 @@ build.json @home-assistant/supervisor
/tests/components/electrasmart/ @jafar-atili
/homeassistant/components/electric_kiwi/ @mikey0000
/tests/components/electric_kiwi/ @mikey0000
/homeassistant/components/elevenlabs/ @sorgfresser
/tests/components/elevenlabs/ @sorgfresser
/homeassistant/components/elgato/ @frenck
/tests/components/elgato/ @frenck
/homeassistant/components/elkm1/ @gwww @bdraco
@@ -399,38 +352,31 @@ build.json @home-assistant/supervisor
/homeassistant/components/elvia/ @ludeeus
/tests/components/elvia/ @ludeeus
/homeassistant/components/emby/ @mezz64
/homeassistant/components/emoncms/ @borpin @alexandrecuer
/tests/components/emoncms/ @borpin @alexandrecuer
/homeassistant/components/emoncms/ @borpin
/homeassistant/components/emonitor/ @bdraco
/tests/components/emonitor/ @bdraco
/homeassistant/components/emulated_hue/ @bdraco @Tho85
/tests/components/emulated_hue/ @bdraco @Tho85
/homeassistant/components/emulated_kasa/ @kbickar
/tests/components/emulated_kasa/ @kbickar
/homeassistant/components/energenie_power_sockets/ @gnumpi
/tests/components/energenie_power_sockets/ @gnumpi
/homeassistant/components/energy/ @home-assistant/core
/tests/components/energy/ @home-assistant/core
/homeassistant/components/energyzero/ @klaasnicolaas
/tests/components/energyzero/ @klaasnicolaas
/homeassistant/components/enigma2/ @autinerd
/tests/components/enigma2/ @autinerd
/homeassistant/components/enocean/ @bdurrer
/tests/components/enocean/ @bdurrer
/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
/tests/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek @catsmanac
/tests/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek @catsmanac
/homeassistant/components/entur_public_transport/ @hfurubotten
/homeassistant/components/environment_canada/ @gwww @michaeldavie
/tests/components/environment_canada/ @gwww @michaeldavie
/homeassistant/components/ephember/ @ttroy50
/homeassistant/components/epic_games_store/ @hacf-fr @Quentame
/tests/components/epic_games_store/ @hacf-fr @Quentame
/homeassistant/components/epion/ @lhgravendeel
/tests/components/epion/ @lhgravendeel
/homeassistant/components/epson/ @pszafer
/tests/components/epson/ @pszafer
/homeassistant/components/eq3btsmart/ @eulemitkeule @dbuezas
/tests/components/eq3btsmart/ @eulemitkeule @dbuezas
/homeassistant/components/epsonworkforce/ @ThaStealth
/homeassistant/components/escea/ @lazdavila
/tests/components/escea/ @lazdavila
/homeassistant/components/esphome/ @OttoWinter @jesserockz @kbx81 @bdraco
@@ -442,7 +388,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/evil_genius_labs/ @balloob
/tests/components/evil_genius_labs/ @balloob
/homeassistant/components/evohome/ @zxdavb
/tests/components/evohome/ @zxdavb
/homeassistant/components/ezviz/ @RenierM26 @baqs
/tests/components/ezviz/ @RenierM26 @baqs
/homeassistant/components/faa_delays/ @ntilley905
@@ -451,8 +396,6 @@ build.json @home-assistant/supervisor
/tests/components/fan/ @home-assistant/core
/homeassistant/components/fastdotcom/ @rohankapoorcom @erwindouna
/tests/components/fastdotcom/ @rohankapoorcom @erwindouna
/homeassistant/components/feedreader/ @mib1185
/tests/components/feedreader/ @mib1185
/homeassistant/components/fibaro/ @rappenze
/tests/components/fibaro/ @rappenze
/homeassistant/components/file/ @fabaff
@@ -490,8 +433,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/forked_daapd/ @uvjustin
/tests/components/forked_daapd/ @uvjustin
/homeassistant/components/fortios/ @kimfrellsen
/homeassistant/components/foscam/ @krmarien
/tests/components/foscam/ @krmarien
/homeassistant/components/foscam/ @skgsergio @krmarien
/tests/components/foscam/ @skgsergio @krmarien
/homeassistant/components/freebox/ @hacf-fr @Quentame
/tests/components/freebox/ @hacf-fr @Quentame
/homeassistant/components/freedompro/ @stefano055415
@@ -508,12 +451,8 @@ build.json @home-assistant/supervisor
/tests/components/frontend/ @home-assistant/frontend
/homeassistant/components/frontier_silicon/ @wlcrs
/tests/components/frontier_silicon/ @wlcrs
/homeassistant/components/fujitsu_fglair/ @crevetor
/tests/components/fujitsu_fglair/ @crevetor
/homeassistant/components/fully_kiosk/ @cgarwood
/tests/components/fully_kiosk/ @cgarwood
/homeassistant/components/fyta/ @dontinelli
/tests/components/fyta/ @dontinelli
/homeassistant/components/garages_amsterdam/ @klaasnicolaas
/tests/components/garages_amsterdam/ @klaasnicolaas
/homeassistant/components/gardena_bluetooth/ @elupus
@@ -525,7 +464,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/generic_hygrostat/ @Shulyaka
/tests/components/generic_hygrostat/ @Shulyaka
/homeassistant/components/geniushub/ @manzanotti
/tests/components/geniushub/ @manzanotti
/homeassistant/components/geo_json_events/ @exxamalte
/tests/components/geo_json_events/ @exxamalte
/homeassistant/components/geo_location/ @home-assistant/core
@@ -556,14 +494,11 @@ build.json @home-assistant/supervisor
/tests/components/google_assistant/ @home-assistant/cloud
/homeassistant/components/google_assistant_sdk/ @tronikos
/tests/components/google_assistant_sdk/ @tronikos
/homeassistant/components/google_cloud/ @lufton @tronikos
/tests/components/google_cloud/ @lufton @tronikos
/homeassistant/components/google_cloud/ @lufton
/homeassistant/components/google_generative_ai_conversation/ @tronikos
/tests/components/google_generative_ai_conversation/ @tronikos
/homeassistant/components/google_mail/ @tkdrob
/tests/components/google_mail/ @tkdrob
/homeassistant/components/google_photos/ @allenporter
/tests/components/google_photos/ @allenporter
/homeassistant/components/google_sheets/ @tkdrob
/tests/components/google_sheets/ @tkdrob
/homeassistant/components/google_tasks/ @allenporter
@@ -584,14 +519,14 @@ build.json @home-assistant/supervisor
/tests/components/group/ @home-assistant/core
/homeassistant/components/guardian/ @bachya
/tests/components/guardian/ @bachya
/homeassistant/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
/tests/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
/homeassistant/components/habitica/ @ASMfreaK @leikoilja
/tests/components/habitica/ @ASMfreaK @leikoilja
/homeassistant/components/hardkernel/ @home-assistant/core
/tests/components/hardkernel/ @home-assistant/core
/homeassistant/components/hardware/ @home-assistant/core
/tests/components/hardware/ @home-assistant/core
/homeassistant/components/harmony/ @ehendrix23 @bdraco @mkeesey @Aohzan
/tests/components/harmony/ @ehendrix23 @bdraco @mkeesey @Aohzan
/homeassistant/components/harmony/ @ehendrix23 @bramkragten @bdraco @mkeesey @Aohzan
/tests/components/harmony/ @ehendrix23 @bramkragten @bdraco @mkeesey @Aohzan
/homeassistant/components/hassio/ @home-assistant/supervisor
/tests/components/hassio/ @home-assistant/supervisor
/homeassistant/components/hdmi_cec/ @inytar
@@ -633,16 +568,12 @@ build.json @home-assistant/supervisor
/tests/components/homekit/ @bdraco
/homeassistant/components/homekit_controller/ @Jc2k @bdraco
/tests/components/homekit_controller/ @Jc2k @bdraco
/homeassistant/components/homematic/ @pvizeli
/tests/components/homematic/ @pvizeli
/homeassistant/components/homematicip_cloud/ @hahn-th
/tests/components/homematicip_cloud/ @hahn-th
/homeassistant/components/homematic/ @pvizeli @danielperna84
/tests/components/homematic/ @pvizeli @danielperna84
/homeassistant/components/homewizard/ @DCSBL
/tests/components/homewizard/ @DCSBL
/homeassistant/components/honeywell/ @rdfurman @mkmer
/tests/components/honeywell/ @rdfurman @mkmer
/homeassistant/components/html5/ @alexyao2015
/tests/components/html5/ @alexyao2015
/homeassistant/components/http/ @home-assistant/core
/tests/components/http/ @home-assistant/core
/homeassistant/components/huawei_lte/ @scop @fphammerle
@@ -661,8 +592,8 @@ build.json @home-assistant/supervisor
/tests/components/huum/ @frwickst
/homeassistant/components/hvv_departures/ @vigonotion
/tests/components/hvv_departures/ @vigonotion
/homeassistant/components/hydrawise/ @dknowles2 @thomaskistler @ptcryan
/tests/components/hydrawise/ @dknowles2 @thomaskistler @ptcryan
/homeassistant/components/hydrawise/ @dknowles2 @ptcryan
/tests/components/hydrawise/ @dknowles2 @ptcryan
/homeassistant/components/hyperion/ @dermotduffy
/tests/components/hyperion/ @dermotduffy
/homeassistant/components/ialarm/ @RyuzakiKK
@@ -686,12 +617,9 @@ build.json @home-assistant/supervisor
/tests/components/image_upload/ @home-assistant/core
/homeassistant/components/imap/ @jbouwh
/tests/components/imap/ @jbouwh
/homeassistant/components/imgw_pib/ @bieniu
/tests/components/imgw_pib/ @bieniu
/homeassistant/components/improv_ble/ @emontnemery
/tests/components/improv_ble/ @emontnemery
/homeassistant/components/incomfort/ @jbouwh
/tests/components/incomfort/ @jbouwh
/homeassistant/components/incomfort/ @zxdavb
/homeassistant/components/influxdb/ @mdegat01
/tests/components/influxdb/ @mdegat01
/homeassistant/components/inkbird/ @bdraco
@@ -721,8 +649,6 @@ build.json @home-assistant/supervisor
/tests/components/ios/ @robbiet480
/homeassistant/components/iotawatt/ @gtdiehl @jyavenard
/tests/components/iotawatt/ @gtdiehl @jyavenard
/homeassistant/components/iotty/ @pburgio @shapournemati-iotty
/tests/components/iotty/ @pburgio @shapournemati-iotty
/homeassistant/components/iperf3/ @rohankapoorcom
/homeassistant/components/ipma/ @dgomes
/tests/components/ipma/ @dgomes
@@ -731,20 +657,10 @@ build.json @home-assistant/supervisor
/homeassistant/components/iqvia/ @bachya
/tests/components/iqvia/ @bachya
/homeassistant/components/irish_rail_transport/ @ttroy50
/homeassistant/components/iron_os/ @tr4nt0r
/tests/components/iron_os/ @tr4nt0r
/homeassistant/components/isal/ @bdraco
/tests/components/isal/ @bdraco
/homeassistant/components/iskra/ @iskramis
/tests/components/iskra/ @iskramis
/homeassistant/components/islamic_prayer_times/ @engrbm87 @cpfair
/tests/components/islamic_prayer_times/ @engrbm87 @cpfair
/homeassistant/components/israel_rail/ @shaiu
/tests/components/israel_rail/ @shaiu
/homeassistant/components/islamic_prayer_times/ @engrbm87
/tests/components/islamic_prayer_times/ @engrbm87
/homeassistant/components/iss/ @DurgNomis-drol
/tests/components/iss/ @DurgNomis-drol
/homeassistant/components/ista_ecotrend/ @tr4nt0r
/tests/components/ista_ecotrend/ @tr4nt0r
/homeassistant/components/isy994/ @bdraco @shbatm
/tests/components/isy994/ @bdraco @shbatm
/homeassistant/components/izone/ @Swamp-Ig
@@ -775,8 +691,6 @@ build.json @home-assistant/supervisor
/tests/components/kitchen_sink/ @home-assistant/core
/homeassistant/components/kmtronic/ @dgomes
/tests/components/kmtronic/ @dgomes
/homeassistant/components/knocki/ @joostlek @jgatto1 @JakeBosh
/tests/components/knocki/ @joostlek @jgatto1 @JakeBosh
/homeassistant/components/knx/ @Julius2342 @farmio @marvin-w
/tests/components/knx/ @Julius2342 @farmio @marvin-w
/homeassistant/components/kodi/ @OnFreund
@@ -813,20 +727,13 @@ build.json @home-assistant/supervisor
/tests/components/leaone/ @bdraco
/homeassistant/components/led_ble/ @bdraco
/tests/components/led_ble/ @bdraco
/homeassistant/components/lektrico/ @lektrico
/tests/components/lektrico/ @lektrico
/homeassistant/components/lg_netcast/ @Drafteed @splinter98
/tests/components/lg_netcast/ @Drafteed @splinter98
/homeassistant/components/lg_netcast/ @Drafteed
/homeassistant/components/lidarr/ @tkdrob
/tests/components/lidarr/ @tkdrob
/homeassistant/components/lifx/ @Djelibeybi
/tests/components/lifx/ @Djelibeybi
/homeassistant/components/light/ @home-assistant/core
/tests/components/light/ @home-assistant/core
/homeassistant/components/linear_garage_door/ @IceBotYT
/tests/components/linear_garage_door/ @IceBotYT
/homeassistant/components/linkplay/ @Velleman
/tests/components/linkplay/ @Velleman
/homeassistant/components/linux_battery/ @fabaff
/homeassistant/components/litejet/ @joncar
/tests/components/litejet/ @joncar
@@ -846,6 +753,8 @@ build.json @home-assistant/supervisor
/tests/components/logbook/ @home-assistant/core
/homeassistant/components/logger/ @home-assistant/core
/tests/components/logger/ @home-assistant/core
/homeassistant/components/logi_circle/ @evanjd
/tests/components/logi_circle/ @evanjd
/homeassistant/components/london_underground/ @jpbede
/tests/components/london_underground/ @jpbede
/homeassistant/components/lookin/ @ANMalko @bdraco
@@ -861,20 +770,15 @@ build.json @home-assistant/supervisor
/tests/components/lupusec/ @majuss @suaveolent
/homeassistant/components/lutron/ @cdheiser @wilburCForce
/tests/components/lutron/ @cdheiser @wilburCForce
/homeassistant/components/lutron_caseta/ @swails @danaues @eclair4151
/tests/components/lutron_caseta/ @swails @danaues @eclair4151
/homeassistant/components/lutron_caseta/ @swails @bdraco @danaues @eclair4151
/tests/components/lutron_caseta/ @swails @bdraco @danaues @eclair4151
/homeassistant/components/lyric/ @timmo001
/tests/components/lyric/ @timmo001
/homeassistant/components/madvr/ @iloveicedgreentea
/tests/components/madvr/ @iloveicedgreentea
/homeassistant/components/mastodon/ @fabaff @andrew-codechimp
/tests/components/mastodon/ @fabaff @andrew-codechimp
/homeassistant/components/mastodon/ @fabaff
/homeassistant/components/matrix/ @PaarthShah
/tests/components/matrix/ @PaarthShah
/homeassistant/components/matter/ @home-assistant/matter
/tests/components/matter/ @home-assistant/matter
/homeassistant/components/mealie/ @joostlek @andrew-codechimp
/tests/components/mealie/ @joostlek @andrew-codechimp
/homeassistant/components/meater/ @Sotolotl @emontnemery
/tests/components/meater/ @Sotolotl @emontnemery
/homeassistant/components/medcom_ble/ @elafargue
@@ -886,8 +790,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/media_source/ @hunterjm
/tests/components/media_source/ @hunterjm
/homeassistant/components/mediaroom/ @dgomes
/homeassistant/components/melcloud/ @erwindouna
/tests/components/melcloud/ @erwindouna
/homeassistant/components/melissa/ @kennedyshead
/tests/components/melissa/ @kennedyshead
/homeassistant/components/melnor/ @vanstinator
@@ -919,32 +821,28 @@ build.json @home-assistant/supervisor
/tests/components/moat/ @bdraco
/homeassistant/components/mobile_app/ @home-assistant/core
/tests/components/mobile_app/ @home-assistant/core
/homeassistant/components/modbus/ @janiversen
/tests/components/modbus/ @janiversen
/homeassistant/components/modem_callerid/ @tkdrob
/tests/components/modem_callerid/ @tkdrob
/homeassistant/components/modern_forms/ @wonderslug
/tests/components/modern_forms/ @wonderslug
/homeassistant/components/moehlenhoff_alpha2/ @j-a-n
/tests/components/moehlenhoff_alpha2/ @j-a-n
/homeassistant/components/monarch_money/ @jeeftor
/tests/components/monarch_money/ @jeeftor
/homeassistant/components/monoprice/ @etsinko @OnFreund
/tests/components/monoprice/ @etsinko @OnFreund
/homeassistant/components/monzo/ @jakemartin-icl
/tests/components/monzo/ @jakemartin-icl
/homeassistant/components/moon/ @fabaff @frenck
/tests/components/moon/ @fabaff @frenck
/homeassistant/components/mopeka/ @bdraco
/tests/components/mopeka/ @bdraco
/homeassistant/components/motion_blinds/ @starkillerOG
/tests/components/motion_blinds/ @starkillerOG
/homeassistant/components/motionblinds_ble/ @LennP @jerrybboy
/tests/components/motionblinds_ble/ @LennP @jerrybboy
/homeassistant/components/motioneye/ @dermotduffy
/tests/components/motioneye/ @dermotduffy
/homeassistant/components/motionmount/ @RJPoelstra
/tests/components/motionmount/ @RJPoelstra
/homeassistant/components/mqtt/ @emontnemery @jbouwh @bdraco
/tests/components/mqtt/ @emontnemery @jbouwh @bdraco
/homeassistant/components/mqtt/ @emontnemery @jbouwh
/tests/components/mqtt/ @emontnemery @jbouwh
/homeassistant/components/msteams/ @peroyvind
/homeassistant/components/mullvad/ @meichthys
/tests/components/mullvad/ @meichthys
@@ -960,8 +858,8 @@ build.json @home-assistant/supervisor
/tests/components/myuplink/ @pajzo @astrandb
/homeassistant/components/nam/ @bieniu
/tests/components/nam/ @bieniu
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
/tests/components/nanoleaf/ @milanmeu @joostlek
/homeassistant/components/nanoleaf/ @milanmeu
/tests/components/nanoleaf/ @milanmeu
/homeassistant/components/neato/ @Santobert
/tests/components/neato/ @Santobert
/homeassistant/components/nederlandse_spoorwegen/ @YarmoM
@@ -990,8 +888,6 @@ build.json @home-assistant/supervisor
/tests/components/nfandroidtv/ @tkdrob
/homeassistant/components/nibe_heatpump/ @elupus
/tests/components/nibe_heatpump/ @elupus
/homeassistant/components/nice_go/ @IceBotYT
/tests/components/nice_go/ @IceBotYT
/homeassistant/components/nightscout/ @marciogranzotto
/tests/components/nightscout/ @marciogranzotto
/homeassistant/components/nilu/ @hfurubotten
@@ -1024,8 +920,6 @@ build.json @home-assistant/supervisor
/tests/components/nut/ @bdraco @ollo69 @pestevez
/homeassistant/components/nws/ @MatthewFlamm @kamiyo
/tests/components/nws/ @MatthewFlamm @kamiyo
/homeassistant/components/nyt_games/ @joostlek
/tests/components/nyt_games/ @joostlek
/homeassistant/components/nzbget/ @chriscla
/tests/components/nzbget/ @chriscla
/homeassistant/components/obihai/ @dshokouhi @ejpenney
@@ -1033,9 +927,9 @@ build.json @home-assistant/supervisor
/homeassistant/components/octoprint/ @rfleming71
/tests/components/octoprint/ @rfleming71
/homeassistant/components/ohmconnect/ @robbiet480
/homeassistant/components/ollama/ @synesthesiam
/tests/components/ollama/ @synesthesiam
/homeassistant/components/ombi/ @larssont
/homeassistant/components/omnilogic/ @oliver84 @djtimca @gentoosu
/tests/components/omnilogic/ @oliver84 @djtimca @gentoosu
/homeassistant/components/onboarding/ @home-assistant/core
/tests/components/onboarding/ @home-assistant/core
/homeassistant/components/oncue/ @bdraco @peterager
@@ -1044,7 +938,6 @@ build.json @home-assistant/supervisor
/tests/components/ondilo_ico/ @JeromeHXP
/homeassistant/components/onewire/ @garbled1 @epenet
/tests/components/onewire/ @garbled1 @epenet
/homeassistant/components/onkyo/ @arturpragacz
/homeassistant/components/onvif/ @hunterjm
/tests/components/onvif/ @hunterjm
/homeassistant/components/open_meteo/ @frenck
@@ -1080,8 +973,8 @@ build.json @home-assistant/supervisor
/tests/components/otbr/ @home-assistant/core
/homeassistant/components/ourgroceries/ @OnFreund
/tests/components/ourgroceries/ @OnFreund
/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117
/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117
/homeassistant/components/ovo_energy/ @timmo001
/tests/components/ovo_energy/ @timmo001
/homeassistant/components/p1_monitor/ @klaasnicolaas
@@ -1100,10 +993,12 @@ build.json @home-assistant/supervisor
/tests/components/persistent_notification/ @home-assistant/core
/homeassistant/components/philips_js/ @elupus
/tests/components/philips_js/ @elupus
/homeassistant/components/pi_hole/ @shenxn
/tests/components/pi_hole/ @shenxn
/homeassistant/components/pi_hole/ @johnluetke @shenxn
/tests/components/pi_hole/ @johnluetke @shenxn
/homeassistant/components/picnic/ @corneyl
/tests/components/picnic/ @corneyl
/homeassistant/components/pilight/ @trekky12
/tests/components/pilight/ @trekky12
/homeassistant/components/ping/ @jpbede
/tests/components/ping/ @jpbede
/homeassistant/components/plaato/ @JohNan
@@ -1133,8 +1028,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/proximity/ @mib1185
/tests/components/proximity/ @mib1185
/homeassistant/components/proxmoxve/ @jhollowe @Corbeno
/homeassistant/components/prusalink/ @balloob
/tests/components/prusalink/ @balloob
/homeassistant/components/prusalink/ @balloob @Skaronator
/tests/components/prusalink/ @balloob @Skaronator
/homeassistant/components/ps4/ @ktnrg45
/tests/components/ps4/ @ktnrg45
/homeassistant/components/pure_energie/ @klaasnicolaas
@@ -1151,12 +1046,10 @@ build.json @home-assistant/supervisor
/tests/components/pvoutput/ @frenck
/homeassistant/components/pvpc_hourly_pricing/ @azogue
/tests/components/pvpc_hourly_pricing/ @azogue
/homeassistant/components/pyload/ @tr4nt0r
/tests/components/pyload/ @tr4nt0r
/homeassistant/components/qbittorrent/ @geoffreylagaisse @finder39
/tests/components/qbittorrent/ @geoffreylagaisse @finder39
/homeassistant/components/qingping/ @bdraco
/tests/components/qingping/ @bdraco
/homeassistant/components/qingping/ @bdraco @skgsergio
/tests/components/qingping/ @bdraco @skgsergio
/homeassistant/components/qld_bushfire/ @exxamalte
/tests/components/qld_bushfire/ @exxamalte
/homeassistant/components/qnap/ @disforw
@@ -1231,8 +1124,8 @@ build.json @home-assistant/supervisor
/tests/components/rituals_perfume_genie/ @milanmeu @frenck
/homeassistant/components/rmvtransport/ @cgtobi
/tests/components/rmvtransport/ @cgtobi
/homeassistant/components/roborock/ @Lash-L
/tests/components/roborock/ @Lash-L
/homeassistant/components/roborock/ @humbertogontijo @Lash-L
/tests/components/roborock/ @humbertogontijo @Lash-L
/homeassistant/components/roku/ @ctalkington
/tests/components/roku/ @ctalkington
/homeassistant/components/romy/ @xeniter
@@ -1249,21 +1142,17 @@ build.json @home-assistant/supervisor
/tests/components/rtsp_to_webrtc/ @allenporter
/homeassistant/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
/tests/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
/homeassistant/components/russound_rio/ @noahhusby
/tests/components/russound_rio/ @noahhusby
/homeassistant/components/ruuvi_gateway/ @akx
/tests/components/ruuvi_gateway/ @akx
/homeassistant/components/ruuvitag_ble/ @akx
/tests/components/ruuvitag_ble/ @akx
/homeassistant/components/rympro/ @OnFreund @elad-bar @maorcc
/tests/components/rympro/ @OnFreund @elad-bar @maorcc
/homeassistant/components/sabnzbd/ @shaiu @jpbede
/tests/components/sabnzbd/ @shaiu @jpbede
/homeassistant/components/sabnzbd/ @shaiu
/tests/components/sabnzbd/ @shaiu
/homeassistant/components/saj/ @fredericvl
/homeassistant/components/samsungtv/ @chemelli74 @epenet
/tests/components/samsungtv/ @chemelli74 @epenet
/homeassistant/components/sanix/ @tomaszsluszniak
/tests/components/sanix/ @tomaszsluszniak
/homeassistant/components/scene/ @home-assistant/core
/tests/components/scene/ @home-assistant/core
/homeassistant/components/schedule/ @home-assistant/core
@@ -1295,8 +1184,6 @@ build.json @home-assistant/supervisor
/tests/components/sensorpro/ @bdraco
/homeassistant/components/sensorpush/ @bdraco
/tests/components/sensorpush/ @bdraco
/homeassistant/components/sensoterra/ @markruys
/tests/components/sensoterra/ @markruys
/homeassistant/components/sentry/ @dcramer @frenck
/tests/components/sentry/ @dcramer @frenck
/homeassistant/components/senz/ @milanmeu
@@ -1320,8 +1207,6 @@ build.json @home-assistant/supervisor
/tests/components/sighthound/ @robmarkcole
/homeassistant/components/signal_messenger/ @bbernhard
/tests/components/signal_messenger/ @bbernhard
/homeassistant/components/simplefin/ @scottg489 @jeeftor
/tests/components/simplefin/ @scottg489 @jeeftor
/homeassistant/components/simplepush/ @engrbm87
/tests/components/simplepush/ @engrbm87
/homeassistant/components/simplisafe/ @bachya
@@ -1346,26 +1231,23 @@ build.json @home-assistant/supervisor
/tests/components/smappee/ @bsmappee
/homeassistant/components/smart_meter_texas/ @grahamwetzler
/tests/components/smart_meter_texas/ @grahamwetzler
/homeassistant/components/smartthings/ @andrewsayre
/tests/components/smartthings/ @andrewsayre
/homeassistant/components/smarttub/ @mdz
/tests/components/smarttub/ @mdz
/homeassistant/components/smarty/ @z0mbieprocess
/homeassistant/components/smhi/ @gjohansson-ST
/tests/components/smhi/ @gjohansson-ST
/homeassistant/components/smlight/ @tl-sl
/tests/components/smlight/ @tl-sl
/homeassistant/components/sms/ @ocalvo
/tests/components/sms/ @ocalvo
/homeassistant/components/snapcast/ @luar123
/tests/components/snapcast/ @luar123
/homeassistant/components/snmp/ @nmaggioni
/tests/components/snmp/ @nmaggioni
/homeassistant/components/snooz/ @AustinBrunkhorst
/tests/components/snooz/ @AustinBrunkhorst
/homeassistant/components/solaredge/ @frenck @bdraco
/tests/components/solaredge/ @frenck @bdraco
/homeassistant/components/solaredge/ @frenck
/tests/components/solaredge/ @frenck
/homeassistant/components/solaredge_local/ @drobtravels @scheric
/homeassistant/components/solarlog/ @Ernst79 @dontinelli
/tests/components/solarlog/ @Ernst79 @dontinelli
/homeassistant/components/solarlog/ @Ernst79
/tests/components/solarlog/ @Ernst79
/homeassistant/components/solax/ @squishykid
/tests/components/solax/ @squishykid
/homeassistant/components/soma/ @ratsept @sebfortier2288
@@ -1374,8 +1256,8 @@ build.json @home-assistant/supervisor
/tests/components/sonarr/ @ctalkington
/homeassistant/components/songpal/ @rytilahti @shenxn
/tests/components/songpal/ @rytilahti @shenxn
/homeassistant/components/sonos/ @jjlawren @peterager
/tests/components/sonos/ @jjlawren @peterager
/homeassistant/components/sonos/ @jjlawren
/tests/components/sonos/ @jjlawren
/homeassistant/components/soundtouch/ @kroimon
/tests/components/soundtouch/ @kroimon
/homeassistant/components/spaceapi/ @fabaff
@@ -1434,10 +1316,10 @@ build.json @home-assistant/supervisor
/tests/components/switchbee/ @jafar-atili
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/homeassistant/components/switcher_kis/ @thecode @YogevBokobza
/tests/components/switcher_kis/ @thecode @YogevBokobza
/homeassistant/components/switchbot_cloud/ @SeraphicRav
/tests/components/switchbot_cloud/ @SeraphicRav
/homeassistant/components/switcher_kis/ @thecode
/tests/components/switcher_kis/ @thecode
/homeassistant/components/switchmate/ @danielhiversen @qiz-li
/homeassistant/components/syncthing/ @zhulik
/tests/components/syncthing/ @zhulik
@@ -1475,8 +1357,6 @@ build.json @home-assistant/supervisor
/tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core
/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core
/homeassistant/components/tesla_fleet/ @Bre77
/tests/components/tesla_fleet/ @Bre77
/homeassistant/components/tesla_wall_connector/ @einarhauks
/tests/components/tesla_wall_connector/ @einarhauks
/homeassistant/components/teslemetry/ @Bre77
@@ -1490,8 +1370,7 @@ build.json @home-assistant/supervisor
/tests/components/thermobeacon/ @bdraco
/homeassistant/components/thermopro/ @bdraco @h3ss
/tests/components/thermopro/ @bdraco @h3ss
/homeassistant/components/thethingsnetwork/ @angelnu
/tests/components/thethingsnetwork/ @angelnu
/homeassistant/components/thethingsnetwork/ @fabaff
/homeassistant/components/thread/ @home-assistant/core
/tests/components/thread/ @home-assistant/core
/homeassistant/components/tibber/ @danielhiversen
@@ -1515,10 +1394,8 @@ build.json @home-assistant/supervisor
/tests/components/tomorrowio/ @raman325 @lymanepp
/homeassistant/components/totalconnect/ @austinmroczek
/tests/components/totalconnect/ @austinmroczek
/homeassistant/components/touchline_sl/ @jnsgruk
/tests/components/touchline_sl/ @jnsgruk
/homeassistant/components/tplink/ @rytilahti @bdraco @sdb9696
/tests/components/tplink/ @rytilahti @bdraco @sdb9696
/homeassistant/components/tplink/ @rytilahti @thegardenmonkey @bdraco @sdb9696
/tests/components/tplink/ @rytilahti @thegardenmonkey @bdraco @sdb9696
/homeassistant/components/tplink_omada/ @MarkGodwin
/tests/components/tplink_omada/ @MarkGodwin
/homeassistant/components/traccar/ @ludeeus
@@ -1541,8 +1418,6 @@ build.json @home-assistant/supervisor
/tests/components/transmission/ @engrbm87 @JPHutchins
/homeassistant/components/trend/ @jpbede
/tests/components/trend/ @jpbede
/homeassistant/components/triggercmd/ @rvmey
/tests/components/triggercmd/ @rvmey
/homeassistant/components/tts/ @home-assistant/core
/tests/components/tts/ @home-assistant/core
/homeassistant/components/tuya/ @Tuya @zlinoliver @frenck
@@ -1559,6 +1434,8 @@ build.json @home-assistant/supervisor
/tests/components/unifi/ @Kane610
/homeassistant/components/unifi_direct/ @tofuSCHNITZEL
/homeassistant/components/unifiled/ @florisvdk
/homeassistant/components/unifiprotect/ @AngellusMortis @bdraco
/tests/components/unifiprotect/ @AngellusMortis @bdraco
/homeassistant/components/upb/ @gwww
/tests/components/upb/ @gwww
/homeassistant/components/upc_connect/ @pvizeli @fabaff
@@ -1647,8 +1524,6 @@ build.json @home-assistant/supervisor
/tests/components/webostv/ @thecode
/homeassistant/components/websocket_api/ @home-assistant/core
/tests/components/websocket_api/ @home-assistant/core
/homeassistant/components/weheat/ @jesperraemaekers
/tests/components/weheat/ @jesperraemaekers
/homeassistant/components/wemo/ @esev
/tests/components/wemo/ @esev
/homeassistant/components/whirlpool/ @abmantis @mkmer
@@ -1666,10 +1541,8 @@ build.json @home-assistant/supervisor
/tests/components/wiz/ @sbidy
/homeassistant/components/wled/ @frenck
/tests/components/wled/ @frenck
/homeassistant/components/wmspro/ @mback2k
/tests/components/wmspro/ @mback2k
/homeassistant/components/wolflink/ @adamkrol93 @mtielen
/tests/components/wolflink/ @adamkrol93 @mtielen
/homeassistant/components/wolflink/ @adamkrol93
/tests/components/wolflink/ @adamkrol93
/homeassistant/components/workday/ @fabaff @gjohansson-ST
/tests/components/workday/ @fabaff @gjohansson-ST
/homeassistant/components/worldclock/ @fabaff
@@ -1688,8 +1561,6 @@ build.json @home-assistant/supervisor
/tests/components/xiaomi_miio/ @rytilahti @syssi @starkillerOG
/homeassistant/components/xiaomi_tv/ @simse
/homeassistant/components/xmpp/ @fabaff @flowolf
/homeassistant/components/yale/ @bdraco
/tests/components/yale/ @bdraco
/homeassistant/components/yale_smart_alarm/ @gjohansson-ST
/tests/components/yale_smart_alarm/ @gjohansson-ST
/homeassistant/components/yalexs_ble/ @bdraco


@@ -5,7 +5,7 @@
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socioeconomic status,
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.


@@ -6,37 +6,47 @@ FROM ${BUILD_FROM}
# Synchronize with homeassistant/core.py:async_stop
ENV \
S6_SERVICES_GRACETIME=240000 \
UV_SYSTEM_PYTHON=true
S6_SERVICES_GRACETIME=240000
ARG QEMU_CPU
# Install uv
RUN pip3 install uv==0.4.15
WORKDIR /usr/src
## Setup Home Assistant Core dependencies
COPY requirements.txt homeassistant/
COPY homeassistant/package_constraints.txt homeassistant/homeassistant/
RUN \
uv pip install \
--no-build \
pip3 install \
--only-binary=:all: \
-r homeassistant/requirements.txt
COPY requirements_all.txt home_assistant_frontend-* home_assistant_intents-* homeassistant/
RUN \
if ls homeassistant/home_assistant_*.whl 1> /dev/null 2>&1; then \
uv pip install homeassistant/home_assistant_*.whl; \
if ls homeassistant/home_assistant_frontend*.whl 1> /dev/null 2>&1; then \
pip3 install homeassistant/home_assistant_frontend-*.whl; \
fi \
&& uv pip install \
--no-build \
-r homeassistant/requirements_all.txt
&& if ls homeassistant/home_assistant_intents*.whl 1> /dev/null 2>&1; then \
pip3 install homeassistant/home_assistant_intents-*.whl; \
fi \
&& if [ "${BUILD_ARCH}" = "i386" ]; then \
LD_PRELOAD="/usr/local/lib/libjemalloc.so.2" \
MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:20000,muzzy_decay_ms:20000" \
linux32 pip3 install \
--only-binary=:all: \
-r homeassistant/requirements_all.txt; \
else \
LD_PRELOAD="/usr/local/lib/libjemalloc.so.2" \
MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:20000,muzzy_decay_ms:20000" \
pip3 install \
--only-binary=:all: \
-r homeassistant/requirements_all.txt; \
fi
## Setup Home Assistant Core
COPY . homeassistant/
RUN \
uv pip install \
pip3 install \
--only-binary=:all: \
-e ./homeassistant \
&& python3 -m compileall \
homeassistant/homeassistant


@@ -22,7 +22,6 @@ RUN \
libavcodec-dev \
libavdevice-dev \
libavutil-dev \
libgammu-dev \
libswscale-dev \
libswresample-dev \
libavfilter-dev \
@@ -35,31 +34,21 @@ RUN \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# Install uv
RUN pip3 install uv
WORKDIR /usr/src
# Setup hass-release
RUN git clone --depth 1 https://github.com/home-assistant/hass-release \
&& uv pip install --system -e hass-release/ \
&& chown -R vscode /usr/src/hass-release/data
&& pip3 install -e hass-release/
USER vscode
ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
RUN uv venv $VIRTUAL_ENV
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
WORKDIR /tmp
WORKDIR /workspaces
# Install Python dependencies from requirements
COPY requirements.txt ./
COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
RUN uv pip install -r requirements.txt
RUN pip3 install -r requirements.txt
COPY requirements_test.txt requirements_test_pre_commit.txt ./
RUN uv pip install -r requirements_test.txt
WORKDIR /workspaces
RUN pip3 install -r requirements_test.txt
RUN rm -rf requirements.txt requirements_test.txt requirements_test_pre_commit.txt homeassistant/
# Set the default shell to bash instead of sh
ENV SHELL /bin/bash


@@ -7,8 +7,6 @@ Check out `home-assistant.io <https://home-assistant.io>`__ for `a
demo <https://demo.home-assistant.io>`__, `installation instructions <https://home-assistant.io/getting-started/>`__,
`tutorials <https://home-assistant.io/getting-started/automation/>`__ and `documentation <https://home-assistant.io/docs/>`__.
This is a project of the `Open Home Foundation <https://www.openhomefoundation.org/>`__.
|screenshot-states|
Featured integrations
@@ -27,4 +25,4 @@ of a component, check the `Home Assistant help section <https://home-assistant.i
.. |screenshot-states| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-states.png
:target: https://demo.home-assistant.io
.. |screenshot-integrations| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-integrations.png
:target: https://home-assistant.io/integrations/
:target: https://home-assistant.io/integrations/


@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.06.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.06.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.06.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.06.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.06.1
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.02.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.02.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.02.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.02.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.02.1
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io


@@ -4,7 +4,7 @@ coverage:
status:
project:
default:
target: auto
target: 90
threshold: 0.09
required:
target: auto


@@ -3,7 +3,6 @@
from __future__ import annotations
import argparse
from contextlib import suppress
import faulthandler
import os
import sys
@@ -147,7 +146,9 @@ def get_arguments() -> argparse.Namespace:
help="Skips validation of operating system",
)
return parser.parse_args()
arguments = parser.parse_args()
return arguments
def check_threads() -> None:
@@ -209,10 +210,8 @@ def main() -> int:
exit_code = runner.run(runtime_conf)
faulthandler.disable()
# It's possible for the fault file to disappear, so suppress obvious errors
with suppress(FileNotFoundError):
if os.path.getsize(fault_file_name) == 0:
os.remove(fault_file_name)
if os.path.getsize(fault_file_name) == 0:
os.remove(fault_file_name)
check_threads()
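
The __main__.py hunk above contrasts guarding the fault-file cleanup with contextlib.suppress(FileNotFoundError) against calling os.path.getsize/os.remove unguarded. A tiny self-contained illustration of the suppress pattern, using a throwaway temp file rather than the real fault file:

"""Tiny demo of contextlib.suppress for best-effort file cleanup."""
from contextlib import suppress
import os
import tempfile

fd, path = tempfile.mkstemp()
os.close(fd)
os.remove(path)  # the file disappears before cleanup runs

# Without suppress, getsize() on a missing file raises FileNotFoundError.
with suppress(FileNotFoundError):
    if os.path.getsize(path) == 0:
        os.remove(path)
print("cleanup finished without raising")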


@@ -28,15 +28,14 @@ from .const import ACCESS_TOKEN_EXPIRATION, GROUP_ID_ADMIN, REFRESH_TOKEN_EXPIRA
from .mfa_modules import MultiFactorAuthModule, auth_mfa_module_from_config
from .models import AuthFlowResult
from .providers import AuthProvider, LoginFlow, auth_provider_from_config
from .providers.homeassistant import HassAuthProvider
EVENT_USER_ADDED = "user_added"
EVENT_USER_UPDATED = "user_updated"
EVENT_USER_REMOVED = "user_removed"
type _MfaModuleDict = dict[str, MultiFactorAuthModule]
type _ProviderKey = tuple[str, str | None]
type _ProviderDict = dict[_ProviderKey, AuthProvider]
_MfaModuleDict = dict[str, MultiFactorAuthModule]
_ProviderKey = tuple[str, str | None]
_ProviderDict = dict[_ProviderKey, AuthProvider]
class InvalidAuthError(Exception):
@@ -54,7 +53,7 @@ async def auth_manager_from_config(
) -> AuthManager:
"""Initialize an auth manager from config.
CORE_CONFIG_SCHEMA will make sure no duplicated auth providers or
CORE_CONFIG_SCHEMA will make sure do duplicated auth providers or
mfa modules exist in configs.
"""
store = auth_store.AuthStore(hass)
@@ -74,13 +73,6 @@ async def auth_manager_from_config(
key = (provider.type, provider.id)
provider_hash[key] = provider
if isinstance(provider, HassAuthProvider):
# Can be removed in 2026.7 with the legacy mode of homeassistant auth provider
# We need to initialize the provider to create the repair if needed as otherwise
# the provider will be initialized on first use, which could be rare as users
# don't frequently change auth settings
await provider.async_initialize()
if module_configs:
modules = await asyncio.gather(
*(auth_mfa_module_from_config(hass, config) for config in module_configs)
@@ -93,7 +85,7 @@ async def auth_manager_from_config(
module_hash[module.id] = module
manager = AuthManager(hass, store, provider_hash, module_hash)
await manager.async_setup()
manager.async_setup()
return manager
@@ -127,11 +119,7 @@ class AuthManagerFlowManager(
flow: data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]],
result: AuthFlowResult,
) -> AuthFlowResult:
"""Return a user as result of login flow.
This method is called when a flow step returns FlowResultType.ABORT or
FlowResultType.CREATE_ENTRY.
"""
"""Return a user as result of login flow."""
flow = cast(LoginFlow, flow)
if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY:
@@ -193,7 +181,8 @@ class AuthManager:
self._async_remove_expired_refresh_tokens, job_type=HassJobType.Callback
)
async def async_setup(self) -> None:
@callback
def async_setup(self) -> None:
"""Set up the auth manager."""
hass = self.hass
hass.async_add_shutdown_job(
@@ -367,15 +356,15 @@ class AuthManager:
local_only: bool | None = None,
) -> None:
"""Update a user."""
kwargs: dict[str, Any] = {
attr_name: value
for attr_name, value in (
("name", name),
("group_ids", group_ids),
("local_only", local_only),
)
if value is not None
}
kwargs: dict[str, Any] = {}
for attr_name, value in (
("name", name),
("group_ids", group_ids),
("local_only", local_only),
):
if value is not None:
kwargs[attr_name] = value
await self._store.async_update_user(user, **kwargs)
if is_active is not None:
@@ -386,13 +375,6 @@ class AuthManager:
self.hass.bus.async_fire(EVENT_USER_UPDATED, {"user_id": user.id})
@callback
def async_update_user_credentials_data(
self, credentials: models.Credentials, data: dict[str, Any]
) -> None:
"""Update credentials data."""
self._store.async_update_user_credentials_data(credentials, data=data)
async def async_activate_user(self, user: models.User) -> None:
"""Activate a user."""
await self._store.async_activate_user(user)
@@ -535,13 +517,6 @@ class AuthManager:
for revoke_callback in callbacks:
revoke_callback()
@callback
def async_set_expiry(
self, refresh_token: models.RefreshToken, *, enable_expiry: bool
) -> None:
"""Enable or disable expiry of a refresh token."""
self._store.async_set_expiry(refresh_token, enable_expiry=enable_expiry)
@callback
def _async_remove_expired_refresh_tokens(self, _: datetime | None = None) -> None:
"""Remove expired refresh tokens."""


@@ -62,7 +62,6 @@ class AuthStore:
self._store = Store[dict[str, list[dict[str, Any]]]](
hass, STORAGE_VERSION, STORAGE_KEY, private=True, atomic_writes=True
)
self._token_id_to_user_id: dict[str, str] = {}
async def async_get_groups(self) -> list[models.Group]:
"""Retrieve all users."""
@@ -105,18 +104,14 @@ class AuthStore:
"perm_lookup": self._perm_lookup,
}
kwargs.update(
{
attr_name: value
for attr_name, value in (
("is_owner", is_owner),
("is_active", is_active),
("local_only", local_only),
("system_generated", system_generated),
)
if value is not None
}
)
for attr_name, value in (
("is_owner", is_owner),
("is_active", is_active),
("local_only", local_only),
("system_generated", system_generated),
):
if value is not None:
kwargs[attr_name] = value
new_user = models.User(**kwargs)
@@ -140,10 +135,7 @@ class AuthStore:
async def async_remove_user(self, user: models.User) -> None:
"""Remove a user."""
user = self._users.pop(user.id)
for refresh_token_id in user.refresh_tokens:
del self._token_id_to_user_id[refresh_token_id]
user.refresh_tokens.clear()
self._users.pop(user.id)
self._async_schedule_save()
async def async_update_user(
@@ -226,9 +218,7 @@ class AuthStore:
kwargs["client_icon"] = client_icon
refresh_token = models.RefreshToken(**kwargs)
token_id = refresh_token.id
user.refresh_tokens[token_id] = refresh_token
self._token_id_to_user_id[token_id] = user.id
user.refresh_tokens[refresh_token.id] = refresh_token
self._async_schedule_save()
return refresh_token
@@ -236,17 +226,19 @@ class AuthStore:
@callback
def async_remove_refresh_token(self, refresh_token: models.RefreshToken) -> None:
"""Remove a refresh token."""
refresh_token_id = refresh_token.id
if user_id := self._token_id_to_user_id.get(refresh_token_id):
del self._users[user_id].refresh_tokens[refresh_token_id]
del self._token_id_to_user_id[refresh_token_id]
self._async_schedule_save()
for user in self._users.values():
if user.refresh_tokens.pop(refresh_token.id, None):
self._async_schedule_save()
break
@callback
def async_get_refresh_token(self, token_id: str) -> models.RefreshToken | None:
"""Get refresh token by id."""
if user_id := self._token_id_to_user_id.get(token_id):
return self._users[user_id].refresh_tokens.get(token_id)
for user in self._users.values():
refresh_token = user.refresh_tokens.get(token_id)
if refresh_token is not None:
return refresh_token
return None
@callback
@@ -285,29 +277,6 @@ class AuthStore:
)
self._async_schedule_save()
@callback
def async_set_expiry(
self, refresh_token: models.RefreshToken, *, enable_expiry: bool
) -> None:
"""Enable or disable expiry of a refresh token."""
if enable_expiry:
if refresh_token.expire_at is None:
refresh_token.expire_at = (
refresh_token.last_used_at or dt_util.utcnow()
).timestamp() + REFRESH_TOKEN_EXPIRATION
self._async_schedule_save()
else:
refresh_token.expire_at = None
self._async_schedule_save()
@callback
def async_update_user_credentials_data(
self, credentials: models.Credentials, data: dict[str, Any]
) -> None:
"""Update credentials data."""
credentials.data = data
self._async_schedule_save()
async def async_load(self) -> None: # noqa: C901
"""Load the users."""
if self._loaded:
@@ -321,6 +290,8 @@ class AuthStore:
perm_lookup = PermissionLookup(ent_reg, dev_reg)
self._perm_lookup = perm_lookup
now_ts = dt_util.utcnow().timestamp()
if data is None or not isinstance(data, dict):
self._set_defaults()
return
@@ -474,6 +445,14 @@ class AuthStore:
else:
last_used_at = None
if (
expire_at := rt_dict.get("expire_at")
) is None and token_type == models.TOKEN_TYPE_NORMAL:
if last_used_at:
expire_at = last_used_at.timestamp() + REFRESH_TOKEN_EXPIRATION
else:
expire_at = now_ts + REFRESH_TOKEN_EXPIRATION
token = models.RefreshToken(
id=rt_dict["id"],
user=users[rt_dict["user_id"]],
@@ -490,7 +469,7 @@ class AuthStore:
jwt_key=rt_dict["jwt_key"],
last_used_at=last_used_at,
last_used_ip=rt_dict.get("last_used_ip"),
expire_at=rt_dict.get("expire_at"),
expire_at=expire_at,
version=rt_dict.get("version"),
)
if "credential_id" in rt_dict:
@@ -499,17 +478,8 @@ class AuthStore:
self._groups = groups
self._users = users
self._build_token_id_to_user_id()
self._async_schedule_save(INITIAL_LOAD_SAVE_DELAY)
@callback
def _build_token_id_to_user_id(self) -> None:
"""Build a map of token id to user id."""
self._token_id_to_user_id = {
token_id: user_id
for user_id, user in self._users.items()
for token_id in user.refresh_tokens
}
self._async_schedule_save(INITIAL_LOAD_SAVE_DELAY)
@callback
def _async_schedule_save(self, delay: float = DEFAULT_SAVE_DELAY) -> None:
@@ -604,7 +574,6 @@ class AuthStore:
read_only_group = _system_read_only_group()
groups[read_only_group.id] = read_only_group
self._groups = groups
self._build_token_id_to_user_id()
def _system_admin_group() -> models.Group:
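
The auth store hunks above contrast two lookup strategies for refresh tokens: scanning every user's refresh_tokens dict versus maintaining a token-id-to-user-id index alongside it. A minimal standalone sketch of the indexed variant; class and method names here are illustrative only and do not mirror the real AuthStore API:

"""Minimal sketch of a token-id -> user-id index, as suggested by the hunks above."""
from dataclasses import dataclass, field


@dataclass
class FakeUser:
    id: str
    # token_id -> token payload (the real store keeps RefreshToken objects)
    refresh_tokens: dict[str, str] = field(default_factory=dict)


class TokenIndex:
    """Keep an O(1) token-id -> user-id map next to the per-user dicts."""

    def __init__(self, users: dict[str, FakeUser]) -> None:
        self._users = users
        # Built once from existing data, then maintained on every add/remove.
        self._token_id_to_user_id = {
            token_id: user_id
            for user_id, user in users.items()
            for token_id in user.refresh_tokens
        }

    def add_token(self, user: FakeUser, token_id: str, payload: str) -> None:
        user.refresh_tokens[token_id] = payload
        self._token_id_to_user_id[token_id] = user.id

    def remove_token(self, token_id: str) -> None:
        if (user_id := self._token_id_to_user_id.pop(token_id, None)) is not None:
            self._users[user_id].refresh_tokens.pop(token_id, None)

    def get_user_id(self, token_id: str) -> str | None:
        # Indexed lookup instead of iterating over every user.
        return self._token_id_to_user_id.get(token_id)


if __name__ == "__main__":
    alice = FakeUser(id="u1")
    index = TokenIndex({"u1": alice})
    index.add_token(alice, "t1", "jwt-key")
    assert index.get_user_id("t1") == "u1"
    index.remove_token("t1")
    assert index.get_user_id("t1") is None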


@@ -78,7 +78,7 @@ class _PyJWTWithVerify(PyJWT):
key: str,
algorithms: list[str],
issuer: str | None = None,
leeway: float | timedelta = 0,
leeway: int | float | timedelta = 0,
options: dict[str, Any] | None = None,
) -> dict[str, Any]:
"""Verify a JWT's signature and claims."""


@@ -2,6 +2,7 @@
from __future__ import annotations
import importlib
import logging
import types
from typing import Any
@@ -14,9 +15,7 @@ from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.importlib import async_import_module
from homeassistant.util.decorator import Registry
from homeassistant.util.hass_dict import HassKey
MULTI_FACTOR_AUTH_MODULES: Registry[str, type[MultiFactorAuthModule]] = Registry()
@@ -30,7 +29,7 @@ MULTI_FACTOR_AUTH_MODULE_SCHEMA = vol.Schema(
extra=vol.ALLOW_EXTRA,
)
DATA_REQS: HassKey[set[str]] = HassKey("mfa_auth_module_reqs_processed")
DATA_REQS = "mfa_auth_module_reqs_processed"
_LOGGER = logging.getLogger(__name__)
@@ -150,7 +149,7 @@ async def _load_mfa_module(hass: HomeAssistant, module_name: str) -> types.Modul
module_path = f"homeassistant.auth.mfa_modules.{module_name}"
try:
module = await async_import_module(hass, module_path)
module = importlib.import_module(module_path)
except ImportError as err:
_LOGGER.error("Unable to load mfa module %s: %s", module_name, err)
raise HomeAssistantError(


@@ -88,7 +88,7 @@ class NotifySetting:
target: str | None = attr.ib(default=None)
type _UsersDict = dict[str, NotifySetting]
_UsersDict = dict[str, NotifySetting]
@MULTI_FACTOR_AUTH_MODULES.register("notify")


@@ -3,9 +3,8 @@
from __future__ import annotations
from datetime import datetime, timedelta
from functools import cached_property
import secrets
from typing import Any, NamedTuple
from typing import TYPE_CHECKING, Any, NamedTuple
import uuid
import attr
@@ -19,6 +18,12 @@ from homeassistant.util import dt as dt_util
from . import permissions as perm_mdl
from .const import GROUP_ID_ADMIN
if TYPE_CHECKING:
from functools import cached_property
else:
from homeassistant.backports.functools import cached_property
TOKEN_TYPE_NORMAL = "normal"
TOKEN_TYPE_SYSTEM = "system"
TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN = "long_lived_access_token"
@@ -86,7 +91,11 @@ class User:
def invalidate_cache(self) -> None:
"""Invalidate permission and is_admin cache."""
for attr_to_invalidate in ("permissions", "is_admin"):
self.__dict__.pop(attr_to_invalidate, None)
# try is much more efficient than suppress
try: # noqa: SIM105
delattr(self, attr_to_invalidate)
except AttributeError:
pass
@attr.s(slots=True)
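
The models.py hunk above is about invalidating functools.cached_property values: both sides clear the cached entry from the instance __dict__, either by popping it directly or via delattr guarded by try/except. A small self-contained illustration of why that works, with a toy class instead of the real User model:

"""Toy demo: functools.cached_property stores its result in the instance
__dict__, so removing that key forces a recompute on the next access."""
from functools import cached_property


class Counter:
    def __init__(self) -> None:
        self.calls = 0

    @cached_property
    def expensive(self) -> int:
        self.calls += 1
        return self.calls


if __name__ == "__main__":
    c = Counter()
    assert c.expensive == 1
    assert c.expensive == 1  # cached, not recomputed
    c.__dict__.pop("expensive", None)  # invalidate; pop is safe if already gone
    assert c.expensive == 2  # recomputed after invalidation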


@@ -3,6 +3,7 @@
from __future__ import annotations
from collections.abc import Callable
from typing import Any
import voluptuous as vol
@@ -63,7 +64,7 @@ class PolicyPermissions(AbstractPermissions):
"""Return a function that can test entity access."""
return compile_entities(self._policy.get(CAT_ENTITIES), self._perm_lookup)
def __eq__(self, other: object) -> bool:
def __eq__(self, other: Any) -> bool:
"""Equals check."""
return isinstance(other, PolicyPermissions) and other._policy == self._policy


@@ -2,7 +2,7 @@
from __future__ import annotations
from typing import Any, Final
from typing import Final
from homeassistant.const import (
EVENT_COMPONENT_LOADED,
@@ -18,17 +18,13 @@ from homeassistant.const import (
EVENT_THEMES_UPDATED,
)
from homeassistant.helpers.area_registry import EVENT_AREA_REGISTRY_UPDATED
from homeassistant.helpers.category_registry import EVENT_CATEGORY_REGISTRY_UPDATED
from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED
from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED
from homeassistant.helpers.floor_registry import EVENT_FLOOR_REGISTRY_UPDATED
from homeassistant.helpers.issue_registry import EVENT_REPAIRS_ISSUE_REGISTRY_UPDATED
from homeassistant.helpers.label_registry import EVENT_LABEL_REGISTRY_UPDATED
from homeassistant.util.event_type import EventType
# These are events that do not contain any sensitive data
# Except for state_changed, which is handled accordingly.
SUBSCRIBE_ALLOWLIST: Final[set[EventType[Any] | str]] = {
SUBSCRIBE_ALLOWLIST: Final[set[str]] = {
EVENT_AREA_REGISTRY_UPDATED,
EVENT_COMPONENT_LOADED,
EVENT_CORE_CONFIG_UPDATE,
@@ -44,7 +40,4 @@ SUBSCRIBE_ALLOWLIST: Final[set[EventType[Any] | str]] = {
EVENT_SHOPPING_LIST_UPDATED,
EVENT_STATE_CHANGED,
EVENT_THEMES_UPDATED,
EVENT_LABEL_REGISTRY_UPDATED,
EVENT_CATEGORY_REGISTRY_UPDATED,
EVENT_FLOOR_REGISTRY_UPDATED,
}


@@ -58,7 +58,10 @@ def _merge_policies(sources: list[CategoryType]) -> CategoryType:
continue
seen.add(key)
key_sources = [src.get(key) for src in sources if isinstance(src, dict)]
key_sources = []
for src in sources:
if isinstance(src, dict):
key_sources.append(src.get(key))
policy[key] = _merge_policies(key_sources)


@@ -4,17 +4,17 @@ from collections.abc import Mapping
# MyPy doesn't support recursion yet. So writing it out as far as we need.
type ValueType = (
ValueType = (
# Example: entities.all = { read: true, control: true }
Mapping[str, bool] | bool | None
)
# Example: entities.domains = { light: … }
type SubCategoryDict = Mapping[str, ValueType]
SubCategoryDict = Mapping[str, ValueType]
type SubCategoryType = SubCategoryDict | bool | None
SubCategoryType = SubCategoryDict | bool | None
type CategoryType = (
CategoryType = (
# Example: entities.domains
Mapping[str, SubCategoryType]
# Example: entities.all
@@ -24,4 +24,4 @@ type CategoryType = (
)
# Example: { entities: … }
type PolicyType = Mapping[str, CategoryType]
PolicyType = Mapping[str, CategoryType]
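
The types.py hunk swaps between PEP 695 type alias statements (Python 3.12+) and plain assignment aliases. Both spell the same aliases for a type checker; the newer form is evaluated lazily and produces a typing.TypeAliasType object. A minimal side-by-side sketch with illustrative names only:

"""Minimal contrast of the two alias styles seen in the hunk above.
Requires Python 3.12+ for the type statement."""
from collections.abc import Mapping

# Plain assignment alias: evaluated eagerly, just a name bound to an object.
ValueTypeOld = Mapping[str, bool] | bool | None

# PEP 695 alias: evaluated lazily, becomes a typing.TypeAliasType instance.
type ValueTypeNew = Mapping[str, bool] | bool | None


def check(value: ValueTypeNew) -> bool:
    """Type checkers treat both alias styles the same way in annotations."""
    return bool(value)


if __name__ == "__main__":
    print(type(ValueTypeNew).__name__)  # TypeAliasType
    print(check({"read": True}))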


@@ -10,8 +10,8 @@ from .const import SUBCAT_ALL
from .models import PermissionLookup
from .types import CategoryType, SubCategoryDict, ValueType
type LookupFunc = Callable[[PermissionLookup, SubCategoryDict, str], ValueType | None]
type SubCatLookupType = dict[str, LookupFunc]
LookupFunc = Callable[[PermissionLookup, SubCategoryDict, str], ValueType | None]
SubCatLookupType = dict[str, LookupFunc]
def lookup_all(


@@ -3,6 +3,7 @@
from __future__ import annotations
from collections.abc import Mapping
import importlib
import logging
import types
from typing import Any
@@ -14,17 +15,15 @@ from homeassistant import data_entry_flow, requirements
from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.importlib import async_import_module
from homeassistant.util import dt as dt_util
from homeassistant.util.decorator import Registry
from homeassistant.util.hass_dict import HassKey
from ..auth_store import AuthStore
from ..const import MFA_SESSION_EXPIRATION
from ..models import AuthFlowResult, Credentials, RefreshToken, User, UserMeta
_LOGGER = logging.getLogger(__name__)
DATA_REQS: HassKey[set[str]] = HassKey("auth_prov_reqs_processed")
DATA_REQS = "auth_prov_reqs_processed"
AUTH_PROVIDERS: Registry[str, type[AuthProvider]] = Registry()
@@ -158,9 +157,7 @@ async def load_auth_provider_module(
) -> types.ModuleType:
"""Load an auth provider."""
try:
module = await async_import_module(
hass, f"homeassistant.auth.providers.{provider}"
)
module = importlib.import_module(f"homeassistant.auth.providers.{provider}")
except ImportError as err:
_LOGGER.error("Unable to load auth provider %s: %s", provider, err)
raise HomeAssistantError(


@@ -14,7 +14,6 @@ import voluptuous as vol
from homeassistant.const import CONF_ID
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers.storage import Store
from ..models import AuthFlowResult, Credentials, UserMeta
@@ -55,27 +54,6 @@ class InvalidUser(HomeAssistantError):
Will not be raised when validating authentication.
"""
def __init__(
self,
*args: object,
translation_key: str | None = None,
translation_placeholders: dict[str, str] | None = None,
) -> None:
"""Initialize exception."""
super().__init__(
*args,
translation_domain="auth",
translation_key=translation_key,
translation_placeholders=translation_placeholders,
)
class InvalidUsername(InvalidUser):
"""Raised when invalid username is specified.
Will not be raised when validating authentication.
"""
class Data:
"""Hold the user data."""
@@ -89,15 +67,13 @@ class Data:
self._data: dict[str, list[dict[str, str]]] | None = None
# Legacy mode will allow usernames to start/end with whitespace
# and will compare usernames case-insensitive.
# Deprecated in June 2019 and will be removed in 2026.7
# Remove in 2020 or when we launch 1.0.
self.is_legacy = False
@callback
def normalize_username(
self, username: str, *, force_normalize: bool = False
) -> str:
def normalize_username(self, username: str) -> str:
"""Normalize a username based on the mode."""
if self.is_legacy and not force_normalize:
if self.is_legacy:
return username
return username.strip().casefold()
@@ -107,49 +83,44 @@ class Data:
if (data := await self._store.async_load()) is None:
data = cast(dict[str, list[dict[str, str]]], {"users": []})
self._async_check_for_not_normalized_usernames(data)
self._data = data
@callback
def _async_check_for_not_normalized_usernames(
self, data: dict[str, list[dict[str, str]]]
) -> None:
not_normalized_usernames: set[str] = set()
seen: set[str] = set()
for user in data["users"]:
username = user["username"]
if self.normalize_username(username, force_normalize=True) != username:
# check if we have duplicates
if (folded := username.casefold()) in seen:
self.is_legacy = True
logging.getLogger(__name__).warning(
(
"Home Assistant auth provider is running in legacy mode "
"because we detected usernames that are normalized (lowercase and without spaces)."
" Please change the username: '%s'."
"because we detected usernames that are case-insensitive"
"equivalent. Please change the username: '%s'."
),
username,
)
not_normalized_usernames.add(username)
if not_normalized_usernames:
self.is_legacy = True
ir.async_create_issue(
self.hass,
"auth",
"homeassistant_provider_not_normalized_usernames",
breaks_in_ha_version="2026.7.0",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key="homeassistant_provider_not_normalized_usernames",
translation_placeholders={
"usernames": f'- "{'"\n- "'.join(sorted(not_normalized_usernames))}"'
},
learn_more_url="homeassistant://config/users",
)
else:
self.is_legacy = False
ir.async_delete_issue(
self.hass, "auth", "homeassistant_provider_not_normalized_usernames"
)
break
seen.add(folded)
# check if we have unstripped usernames
if username != username.strip():
self.is_legacy = True
logging.getLogger(__name__).warning(
(
"Home Assistant auth provider is running in legacy mode "
"because we detected usernames that start or end in a "
"space. Please change the username: '%s'."
),
username,
)
break
self._data = data
@property
def users(self) -> list[dict[str, str]]:
@@ -191,11 +162,13 @@ class Data:
return hashed
def add_auth(self, username: str, password: str) -> None:
"""Add a new authenticated user/pass.
"""Add a new authenticated user/pass."""
username = self.normalize_username(username)
Raises InvalidUsername if the new username is invalid.
"""
self._validate_new_username(username)
if any(
self.normalize_username(user["username"]) == username for user in self.users
):
raise InvalidUser
self.users.append(
{
@@ -216,7 +189,7 @@ class Data:
break
if index is None:
raise InvalidUser(translation_key="user_not_found")
raise InvalidUser
self.users.pop(index)
@@ -232,50 +205,7 @@ class Data:
user["password"] = self.hash_password(new_password, True).decode()
break
else:
raise InvalidUser(translation_key="user_not_found")
@callback
def _validate_new_username(self, new_username: str) -> None:
"""Validate that username is normalized and unique.
Raises InvalidUsername if the new username is invalid.
"""
normalized_username = self.normalize_username(
new_username, force_normalize=True
)
if normalized_username != new_username:
raise InvalidUsername(
translation_key="username_not_normalized",
translation_placeholders={"new_username": new_username},
)
if any(
self.normalize_username(user["username"]) == normalized_username
for user in self.users
):
raise InvalidUsername(
translation_key="username_already_exists",
translation_placeholders={"username": new_username},
)
@callback
def change_username(self, username: str, new_username: str) -> None:
"""Update the username.
Raises InvalidUser if user cannot be found.
Raises InvalidUsername if the new username is invalid.
"""
username = self.normalize_username(username)
self._validate_new_username(new_username)
for user in self.users:
if self.normalize_username(user["username"]) == username:
user["username"] = new_username
assert self._data is not None
self._async_check_for_not_normalized_usernames(self._data)
break
else:
raise InvalidUser(translation_key="user_not_found")
raise InvalidUser
async def async_save(self) -> None:
"""Save data."""
@@ -348,20 +278,6 @@ class HassAuthProvider(AuthProvider):
)
await self.data.async_save()
async def async_change_username(
self, credential: Credentials, new_username: str
) -> None:
"""Validate new username and change it including updating credentials object."""
if self.data is None:
await self.async_initialize()
assert self.data is not None
self.data.change_username(credential.data["username"], new_username)
self.hass.auth.async_update_user_credentials_data(
credential, {**credential.data, "username": new_username}
)
await self.data.async_save()
async def async_get_or_create_credentials(
self, flow_result: Mapping[str, str]
) -> Credentials:


@@ -0,0 +1,123 @@
"""Support Legacy API password auth provider.
It will be removed when the auth system is production ready.
"""
from __future__ import annotations
from collections.abc import Mapping
import hmac
from typing import Any, cast
import voluptuous as vol
from homeassistant.core import async_get_hass, callback
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from ..models import AuthFlowResult, Credentials, UserMeta
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
AUTH_PROVIDER_TYPE = "legacy_api_password"
CONF_API_PASSWORD = "api_password"
_CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
{vol.Required(CONF_API_PASSWORD): cv.string}, extra=vol.PREVENT_EXTRA
)
def _create_repair_and_validate(config: dict[str, Any]) -> dict[str, Any]:
async_create_issue(
async_get_hass(),
"auth",
"deprecated_legacy_api_password",
breaks_in_ha_version="2024.6.0",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="deprecated_legacy_api_password",
)
return _CONFIG_SCHEMA(config) # type: ignore[no-any-return]
CONFIG_SCHEMA = _create_repair_and_validate
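# Note (not part of the diff): assigning the wrapper above to CONFIG_SCHEMA means the
# repair issue is created as a side effect of config validation, i.e. only when a
# legacy_api_password auth provider actually appears in the user's configuration.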
LEGACY_USER_NAME = "Legacy API password user"
class InvalidAuthError(HomeAssistantError):
"""Raised when submitting invalid authentication."""
@AUTH_PROVIDERS.register(AUTH_PROVIDER_TYPE)
class LegacyApiPasswordAuthProvider(AuthProvider):
"""An auth provider support legacy api_password."""
DEFAULT_TITLE = "Legacy API Password"
@property
def api_password(self) -> str:
"""Return api_password."""
return str(self.config[CONF_API_PASSWORD])
async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
"""Return a flow to login."""
return LegacyLoginFlow(self)
@callback
def async_validate_login(self, password: str) -> None:
"""Validate password."""
api_password = str(self.config[CONF_API_PASSWORD])
if not hmac.compare_digest(
api_password.encode("utf-8"), password.encode("utf-8")
):
raise InvalidAuthError
async def async_get_or_create_credentials(
self, flow_result: Mapping[str, str]
) -> Credentials:
"""Return credentials for this login."""
credentials = await self.async_credentials()
if credentials:
return credentials[0]
return self.async_create_credentials({})
async def async_user_meta_for_credentials(
self, credentials: Credentials
) -> UserMeta:
"""Return info for the user.
Will be used to populate info when creating a new user.
"""
return UserMeta(name=LEGACY_USER_NAME, is_active=True)
class LegacyLoginFlow(LoginFlow):
"""Handler for the login flow."""
async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> AuthFlowResult:
"""Handle the step of the form."""
errors = {}
if user_input is not None:
try:
cast(
LegacyApiPasswordAuthProvider, self._auth_provider
).async_validate_login(user_input["password"])
except InvalidAuthError:
errors["base"] = "invalid_auth"
if not errors:
return await self.async_finish({})
return self.async_show_form(
step_id="init",
data_schema=vol.Schema({vol.Required("password"): str}),
errors=errors,
)
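# Illustrative sketch (not part of the diff): async_validate_login above uses
# hmac.compare_digest instead of "==" so the comparison runs in constant time and
# response timing cannot leak how much of a guessed password matched. A standalone
# illustration with made-up values:
import hmac

stored = "super-secret"
assert hmac.compare_digest(stored.encode("utf-8"), "super-secret".encode("utf-8"))
assert not hmac.compare_digest(stored.encode("utf-8"), "guess".encode("utf-8"))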


@@ -28,8 +28,8 @@ from .. import InvalidAuthError
from ..models import AuthFlowResult, Credentials, RefreshToken, UserMeta
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
type IPAddress = IPv4Address | IPv6Address
type IPNetwork = IPv4Network | IPv6Network
IPAddress = IPv4Address | IPv6Address
IPNetwork = IPv4Network | IPv6Network
CONF_TRUSTED_NETWORKS = "trusted_networks"
CONF_TRUSTED_USERS = "trusted_users"


@@ -9,21 +9,8 @@ import it.
from __future__ import annotations
from enum import StrEnum as _StrEnum
from functools import partial
from enum import StrEnum
from homeassistant.helpers.deprecation import (
DeprecatedAlias,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
# StrEnum deprecated as of 2024.5 use enum.StrEnum instead.
_DEPRECATED_StrEnum = DeprecatedAlias(_StrEnum, "enum.StrEnum", "2025.5")
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
__all__ = [
"StrEnum",
]
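# Illustrative sketch (not part of the diff): a minimal, simplified version of the
# module-level deprecation pattern used above, assuming the deprecation helpers
# behave as their names suggest. The real code delegates to DeprecatedAlias and
# check_if_deprecated_constant; this only shows the PEP 562 __getattr__ idea.
import enum
import logging

_DEPRECATED = {"StrEnum": enum.StrEnum}  # alias -> replacement


def __getattr__(name: str):  # called only for names not found in the module
    if name in _DEPRECATED:
        logging.getLogger(__name__).warning(
            "%s is deprecated, use enum.%s instead", name, name
        )
        return _DEPRECATED[name]
    raise AttributeError(name)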


@@ -1,30 +1,81 @@
"""Functools backports from standard lib.
"""Functools backports from standard lib."""
This file contained the backport of the cached_property implementation of Python 3.12.
Since we have dropped support for Python 3.11, we can remove this backport.
This file is kept for now to avoid breaking custom components that might
import it.
"""
# This file contains parts of Python's module wrapper
# for the _functools C module
# to allow utilities written in Python to be added
# to the functools module.
# Written by Nick Coghlan <ncoghlan at gmail.com>,
# Raymond Hettinger <python at rcn.com>,
# and Łukasz Langa <lukasz at langa.pl>.
# Copyright © 2001-2023 Python Software Foundation; All Rights Reserved
from __future__ import annotations
from functools import cached_property as _cached_property, partial
from collections.abc import Callable
from types import GenericAlias
from typing import Any, Generic, Self, TypeVar, overload
from homeassistant.helpers.deprecation import (
DeprecatedAlias,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
_T = TypeVar("_T")
# cached_property deprecated as of 2024.5 use functools.cached_property instead.
_DEPRECATED_cached_property = DeprecatedAlias(
_cached_property, "functools.cached_property", "2025.5"
)
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
class cached_property(Generic[_T]):
"""Backport of Python 3.12's cached_property.
Includes https://github.com/python/cpython/pull/101890/files
"""
def __init__(self, func: Callable[[Any], _T]) -> None:
"""Initialize."""
self.func: Callable[[Any], _T] = func
self.attrname: str | None = None
self.__doc__ = func.__doc__
def __set_name__(self, owner: type[Any], name: str) -> None:
"""Set name."""
if self.attrname is None:
self.attrname = name
elif name != self.attrname:
raise TypeError(
"Cannot assign the same cached_property to two different names "
f"({self.attrname!r} and {name!r})."
)
@overload
def __get__(self, instance: None, owner: type[Any] | None = None) -> Self:
...
@overload
def __get__(self, instance: Any, owner: type[Any] | None = None) -> _T:
...
def __get__(
self, instance: Any | None, owner: type[Any] | None = None
) -> _T | Self:
"""Get."""
if instance is None:
return self
if self.attrname is None:
raise TypeError(
"Cannot use cached_property instance without calling __set_name__ on it."
)
try:
cache = instance.__dict__
# not all objects have __dict__ (e.g. class defines slots)
except AttributeError:
msg = (
f"No '__dict__' attribute on {type(instance).__name__!r} "
f"instance to cache {self.attrname!r} property."
)
raise TypeError(msg) from None
val = self.func(instance)
try:
cache[self.attrname] = val
except TypeError:
msg = (
f"The '__dict__' attribute on {type(instance).__name__!r} instance "
f"does not support item assignment for caching {self.attrname!r} property."
)
raise TypeError(msg) from None
return val
__class_getitem__ = classmethod(GenericAlias) # type: ignore[var-annotated]
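# Illustrative usage sketch (not part of the diff): the backport above stores the
# computed value in the instance __dict__ under the property name, so the wrapped
# function runs only on first access. The Circle class below is made up.
class Circle:
    def __init__(self, radius: float) -> None:
        self.radius = radius

    @cached_property
    def area(self) -> float:
        return 3.141592653589793 * self.radius**2


c = Circle(2.0)
first = c.area   # computed once and cached in c.__dict__["area"]
second = c.area  # served from the cache, the wrapped function is not called again
assert first == second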


@@ -1,252 +1,21 @@
"""Block blocking calls being done in asyncio."""
import builtins
from collections.abc import Callable
from contextlib import suppress
from dataclasses import dataclass
import glob
from http.client import HTTPConnection
import importlib
import os
from pathlib import Path
from ssl import SSLContext
import sys
import threading
import time
from typing import Any
from .helpers.frame import get_current_frame
from .util.loop import protect_loop
_IN_TESTS = "unittest" in sys.modules
ALLOWED_FILE_PREFIXES = ("/proc",)
def _check_import_call_allowed(mapped_args: dict[str, Any]) -> bool:
# If the module is already imported, we can ignore it.
return bool((args := mapped_args.get("args")) and args[0] in sys.modules)
def _check_file_allowed(mapped_args: dict[str, Any]) -> bool:
# If the file is in /proc we can ignore it.
args = mapped_args["args"]
path = args[0] if type(args[0]) is str else str(args[0]) # noqa: E721
return path.startswith(ALLOWED_FILE_PREFIXES)
def _check_sleep_call_allowed(mapped_args: dict[str, Any]) -> bool:
#
# Avoid extracting the stack unless we need to since it
# will have to access the linecache which can do blocking
# I/O and we are trying to avoid blocking calls.
#
# frame[0] is us
# frame[1] is raise_for_blocking_call
# frame[2] is protected_loop_func
# frame[3] is the offender
with suppress(ValueError):
return get_current_frame(4).f_code.co_filename.endswith("pydevd.py")
return False
@dataclass(slots=True, frozen=True)
class BlockingCall:
"""Class to hold information about a blocking call."""
original_func: Callable
object: object
function: str
check_allowed: Callable[[dict[str, Any]], bool] | None
strict: bool
strict_core: bool
skip_for_tests: bool
_BLOCKING_CALLS: tuple[BlockingCall, ...] = (
BlockingCall(
original_func=HTTPConnection.putrequest,
object=HTTPConnection,
function="putrequest",
check_allowed=None,
strict=True,
strict_core=True,
skip_for_tests=False,
),
BlockingCall(
original_func=time.sleep,
object=time,
function="sleep",
check_allowed=_check_sleep_call_allowed,
strict=True,
strict_core=True,
skip_for_tests=False,
),
BlockingCall(
original_func=glob.glob,
object=glob,
function="glob",
check_allowed=None,
strict=False,
strict_core=False,
skip_for_tests=False,
),
BlockingCall(
original_func=glob.iglob,
object=glob,
function="iglob",
check_allowed=None,
strict=False,
strict_core=False,
skip_for_tests=False,
),
BlockingCall(
original_func=os.walk,
object=os,
function="walk",
check_allowed=None,
strict=False,
strict_core=False,
skip_for_tests=False,
),
BlockingCall(
original_func=os.listdir,
object=os,
function="listdir",
check_allowed=None,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=os.scandir,
object=os,
function="scandir",
check_allowed=None,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=builtins.open,
object=builtins,
function="open",
check_allowed=_check_file_allowed,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=importlib.import_module,
object=importlib,
function="import_module",
check_allowed=_check_import_call_allowed,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=SSLContext.load_default_certs,
object=SSLContext,
function="load_default_certs",
check_allowed=None,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=SSLContext.load_verify_locations,
object=SSLContext,
function="load_verify_locations",
check_allowed=None,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=SSLContext.load_cert_chain,
object=SSLContext,
function="load_cert_chain",
check_allowed=None,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=Path.open,
object=Path,
function="open",
check_allowed=_check_file_allowed,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=Path.read_text,
object=Path,
function="read_text",
check_allowed=_check_file_allowed,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=Path.read_bytes,
object=Path,
function="read_bytes",
check_allowed=_check_file_allowed,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=Path.write_text,
object=Path,
function="write_text",
check_allowed=_check_file_allowed,
strict=False,
strict_core=False,
skip_for_tests=True,
),
BlockingCall(
original_func=Path.write_bytes,
object=Path,
function="write_bytes",
check_allowed=_check_file_allowed,
strict=False,
strict_core=False,
skip_for_tests=True,
),
)
@dataclass(slots=True)
class BlockedCalls:
"""Class to track which calls are blocked."""
calls: set[BlockingCall]
_BLOCKED_CALLS = BlockedCalls(set())
from .util.async_ import protect_loop
def enable() -> None:
"""Enable the detection of blocking calls in the event loop."""
calls = _BLOCKED_CALLS.calls
if calls:
raise RuntimeError("Blocking call detection is already enabled")
# Prevent urllib3 and requests doing I/O in event loop
HTTPConnection.putrequest = protect_loop( # type: ignore[method-assign]
HTTPConnection.putrequest
)
loop_thread_id = threading.get_ident()
for blocking_call in _BLOCKING_CALLS:
if _IN_TESTS and blocking_call.skip_for_tests:
continue
# Prevent sleeping in event loop. Non-strict since 2022.02
time.sleep = protect_loop(time.sleep, strict=False)
protected_function = protect_loop(
blocking_call.original_func,
strict=blocking_call.strict,
strict_core=blocking_call.strict_core,
check_allowed=blocking_call.check_allowed,
loop_thread_id=loop_thread_id,
)
setattr(blocking_call.object, blocking_call.function, protected_function)
calls.add(blocking_call)
# Currently disabled. pytz doing I/O when getting timezone.
# Prevent files being opened inside the event loop
# builtins.open = protect_loop(builtins.open)
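# Illustrative sketch (not part of the diff): the essence of the monkey-patching done
# by enable() above -- replace a blocking callable with a wrapper that can tell when
# it is invoked from the thread running the event loop. The real protect_loop adds
# reporting, strict/non-strict handling and per-call allow checks; this is only the
# core idea, with made-up names.
import threading
import time


def _guard(func, loop_thread_id):
    def wrapper(*args, **kwargs):
        if threading.get_ident() == loop_thread_id:
            print(f"Detected blocking call to {func.__name__} inside the event loop thread")
        return func(*args, **kwargs)

    return wrapper


# Patching time.sleep the same way enable() patches each BlockingCall entry.
time.sleep = _guard(time.sleep, threading.get_ident())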


@@ -3,14 +3,11 @@
from __future__ import annotations
import asyncio
from collections import defaultdict
import contextlib
from functools import partial
from itertools import chain
from datetime import timedelta
import logging
from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler
import mimetypes
from operator import contains, itemgetter
import logging.handlers
from operator import itemgetter
import os
import platform
import sys
@@ -24,14 +21,7 @@ import cryptography.hazmat.backends.openssl.backend # noqa: F401
import voluptuous as vol
import yarl
from . import (
block_async_io,
config as conf_util,
config_entries,
core,
loader,
requirements,
)
from . import config as conf_util, config_entries, core, loader, requirements
# Pre-import frontend deps which have no requirements here to avoid
# loading them at run time and blocking the event loop. We do this ahead
@@ -42,11 +32,9 @@ from .components import (
api as api_pre_import, # noqa: F401
auth as auth_pre_import, # noqa: F401
config as config_pre_import, # noqa: F401
default_config as default_config_pre_import, # noqa: F401
device_automation as device_automation_pre_import, # noqa: F401
diagnostics as diagnostics_pre_import, # noqa: F401
file_upload as file_upload_pre_import, # noqa: F401
group as group_pre_import, # noqa: F401
history as history_pre_import, # noqa: F401
http, # not named pre_import since it has requirements
image_upload as image_upload_import, # noqa: F401 - not named pre_import since it has requirements
@@ -63,7 +51,6 @@ from .components import (
)
from .components.sensor import recorder as sensor_recorder # noqa: F401
from .const import (
BASE_PLATFORMS,
FORMAT_DATETIME,
KEY_DATA_LOGGING as DATA_LOGGING,
REQUIRED_NEXT_PYTHON_HA_RELEASE,
@@ -73,7 +60,6 @@ from .const import (
from .exceptions import HomeAssistantError
from .helpers import (
area_registry,
category_registry,
config_validation as cv,
device_registry,
entity,
@@ -86,43 +72,29 @@ from .helpers import (
template,
translation,
)
from .helpers.dispatcher import async_dispatcher_send_internal
from .helpers.storage import get_internal_store_manager
from .helpers.system_info import async_get_system_info, is_official_image
from .helpers.dispatcher import async_dispatcher_send
from .helpers.typing import ConfigType
from .setup import (
# _setup_started is marked as protected to make it clear
# that it is not part of the public API and should not be used
# by integrations. It is only used for internal tracking of
# which integrations are being set up.
_setup_started,
async_get_setup_timings,
BASE_PLATFORMS,
DATA_SETUP_STARTED,
DATA_SETUP_TIME,
async_notify_setup_error,
async_set_domains_to_be_loaded,
async_setup_component,
)
from .util.async_ import create_eager_task
from .util.hass_dict import HassKey
from .util.logging import async_activate_log_queue_handler
from .util.package import async_get_user_site, is_docker_env, is_virtual_env
with contextlib.suppress(ImportError):
# Ensure anyio backend is imported to avoid it being imported in the event loop
from anyio._backends import _asyncio # noqa: F401
from .util.package import async_get_user_site, is_virtual_env
if TYPE_CHECKING:
from .runner import RuntimeConfig
_LOGGER = logging.getLogger(__name__)
SETUP_ORDER_SORT_KEY = partial(contains, BASE_PLATFORMS)
ERROR_LOG_FILENAME = "home-assistant.log"
# hass.data key for logging information.
DATA_REGISTRIES_LOADED: HassKey[None] = HassKey("bootstrap_registries_loaded")
DATA_REGISTRIES_LOADED = "bootstrap_registries_loaded"
LOG_SLOW_STARTUP_INTERVAL = 60
SLOW_STARTUP_CHECK_INTERVAL = 1
@@ -134,15 +106,8 @@ COOLDOWN_TIME = 60
DEBUGGER_INTEGRATIONS = {"debugpy"}
# Core integrations are unconditionally loaded
CORE_INTEGRATIONS = {"homeassistant", "persistent_notification"}
# Integrations that are loaded right after the core is set up
LOGGING_AND_HTTP_DEPS_INTEGRATIONS = {
# isal is loaded right away before `http` to ensure if its
# enabled, that `isal` is up to date.
"isal",
LOGGING_INTEGRATIONS = {
# Set log levels
"logger",
# Error logging
@@ -220,77 +185,35 @@ CRITICAL_INTEGRATIONS = {
"frontend",
}
SETUP_ORDER = (
# Load logging and http deps as soon as possible
("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS),
SETUP_ORDER = {
# Load logging as soon as possible
"logging": LOGGING_INTEGRATIONS,
# Setup frontend
("frontend", FRONTEND_INTEGRATIONS),
"frontend": FRONTEND_INTEGRATIONS,
# Setup recorder
("recorder", RECORDER_INTEGRATIONS),
"recorder": RECORDER_INTEGRATIONS,
# Start up debuggers. Start these first in case they want to wait.
("debugger", DEBUGGER_INTEGRATIONS),
)
#
# Storage keys we are likely to load during startup
# in order of when we expect to load them.
#
# If they do not exist they will not be loaded
#
PRELOAD_STORAGE = [
"core.logger",
"core.network",
"http.auth",
"image",
"lovelace_dashboards",
"lovelace_resources",
"core.uuid",
"lovelace.map",
"bluetooth.passive_update_processor",
"bluetooth.remote_scanners",
"assist_pipeline.pipelines",
"core.analytics",
"auth_module.totp",
]
"debugger": DEBUGGER_INTEGRATIONS,
}
async def async_setup_hass(
runtime_config: RuntimeConfig,
) -> core.HomeAssistant | None:
"""Set up Home Assistant."""
hass = core.HomeAssistant(runtime_config.config_dir)
async def create_hass() -> core.HomeAssistant:
"""Create the hass object and do basic setup."""
hass = core.HomeAssistant(runtime_config.config_dir)
loader.async_setup(hass)
await async_enable_logging(
hass,
runtime_config.verbose,
runtime_config.log_rotate_days,
runtime_config.log_file,
runtime_config.log_no_color,
)
if runtime_config.debug or hass.loop.get_debug():
hass.config.debug = True
hass.config.safe_mode = runtime_config.safe_mode
hass.config.skip_pip = runtime_config.skip_pip
hass.config.skip_pip_packages = runtime_config.skip_pip_packages
return hass
async def stop_hass(hass: core.HomeAssistant) -> None:
"""Stop hass."""
# Ask integrations to shut down. It's messy but we can't
# do a clean stop without knowing what is broken
with contextlib.suppress(TimeoutError):
async with hass.timeout.async_timeout(10):
await hass.async_stop()
hass = await create_hass()
async_enable_logging(
hass,
runtime_config.verbose,
runtime_config.log_rotate_days,
runtime_config.log_file,
runtime_config.log_no_color,
)
hass.config.safe_mode = runtime_config.safe_mode
hass.config.skip_pip = runtime_config.skip_pip
hass.config.skip_pip_packages = runtime_config.skip_pip_packages
if runtime_config.skip_pip or runtime_config.skip_pip_packages:
_LOGGER.warning(
"Skipping pip installation of required modules. This may cause issues"
@@ -302,8 +225,7 @@ async def async_setup_hass(
_LOGGER.info("Config directory: %s", runtime_config.config_dir)
block_async_io.enable()
loader.async_setup(hass)
config_dict = None
basic_setup_success = False
@@ -327,31 +249,29 @@ async def async_setup_hass(
if config_dict is None:
recovery_mode = True
await stop_hass(hass)
hass = await create_hass()
elif not basic_setup_success:
_LOGGER.warning("Unable to set up core integrations. Activating recovery mode")
recovery_mode = True
await stop_hass(hass)
hass = await create_hass()
elif any(domain not in hass.config.components for domain in CRITICAL_INTEGRATIONS):
_LOGGER.warning(
"Detected that %s did not load. Activating recovery mode",
",".join(CRITICAL_INTEGRATIONS),
)
# Ask integrations to shut down. It's messy but we can't
# do a clean stop without knowing what is broken
with contextlib.suppress(TimeoutError):
async with hass.timeout.async_timeout(10):
await hass.async_stop()
recovery_mode = True
old_config = hass.config
old_logging = hass.data.get(DATA_LOGGING)
recovery_mode = True
await stop_hass(hass)
hass = await create_hass()
hass = core.HomeAssistant(old_config.config_dir)
if old_logging:
hass.data[DATA_LOGGING] = old_logging
hass.config.debug = old_config.debug
hass.config.skip_pip = old_config.skip_pip
hass.config.skip_pip_packages = old_config.skip_pip_packages
hass.config.internal_url = old_config.internal_url
@@ -398,45 +318,37 @@ def open_hass_ui(hass: core.HomeAssistant) -> None:
)
def _init_blocking_io_modules_in_executor() -> None:
"""Initialize modules that do blocking I/O in executor."""
# Cache the result of platform.uname().processor in the executor.
# Multiple modules call this function at startup which
# executes a blocking subprocess call. This is a problem for the
# asyncio event loop. By priming the cache of uname we can
# avoid the blocking call in the event loop.
_ = platform.uname().processor
# Initialize the mimetypes module to avoid blocking calls
# to the filesystem to load the mime.types file.
mimetypes.init()
# Initialize is_official_image and is_docker_env to avoid blocking calls
# to the filesystem.
is_official_image()
is_docker_env()
async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
"""Load the registries and modules that will do blocking I/O."""
"""Load the registries and cache the result of platform.uname().processor."""
if DATA_REGISTRIES_LOADED in hass.data:
return
hass.data[DATA_REGISTRIES_LOADED] = None
def _cache_uname_processor() -> None:
"""Cache the result of platform.uname().processor in the executor.
Multiple modules call this function at startup which
executes a blocking subprocess call. This is a problem for the
asyncio event loop. By priming the cache of uname we can
avoid the blocking call in the event loop.
"""
platform.uname().processor # pylint: disable=expression-not-assigned
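# Note (not part of the diff): platform.uname() caches its result inside the platform
# module, so running it once here in an executor thread means later lookups of
# .processor from the event loop hit that cache instead of triggering the blocking
# subprocess call mentioned in the docstring above.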
# Load the registries and cache the result of platform.uname().processor
translation.async_setup(hass)
entity.async_setup(hass)
template.async_setup(hass)
await asyncio.gather(
create_eager_task(get_internal_store_manager(hass).async_initialize()),
create_eager_task(area_registry.async_load(hass)),
create_eager_task(category_registry.async_load(hass)),
create_eager_task(device_registry.async_load(hass)),
create_eager_task(entity_registry.async_load(hass)),
create_eager_task(floor_registry.async_load(hass)),
create_eager_task(issue_registry.async_load(hass)),
create_eager_task(label_registry.async_load(hass)),
hass.async_add_executor_job(_init_blocking_io_modules_in_executor),
hass.async_add_executor_job(_cache_uname_processor),
create_eager_task(template.async_load_custom_templates(hass)),
create_eager_task(restore_state.async_load(hass)),
create_eager_task(hass.config_entries.async_initialize()),
create_eager_task(async_get_system_info(hass)),
)
@@ -451,9 +363,6 @@ async def async_from_config_dict(
start = monotonic()
hass.config_entries = config_entries.ConfigEntries(hass, config)
# Prime custom component cache early so we know if registry entries are tied
# to a custom integration
await loader.async_get_custom_components(hass)
await async_load_base_functionality(hass)
# Set up core.
@@ -462,11 +371,7 @@ async def async_from_config_dict(
if not all(
await asyncio.gather(
*(
create_eager_task(
async_setup_component(hass, domain, config),
name=f"bootstrap setup {domain}",
loop=hass.loop,
)
create_eager_task(async_setup_component(hass, domain, config))
for domain in CORE_INTEGRATIONS
)
)
@@ -529,7 +434,8 @@ async def async_from_config_dict(
return hass
async def async_enable_logging(
@core.callback
def async_enable_logging(
hass: core.HomeAssistant,
verbose: bool = False,
log_rotate_days: int | None = None,
@@ -586,10 +492,10 @@ async def async_enable_logging(
logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
logging.getLogger("httpx").setLevel(logging.WARNING)
sys.excepthook = lambda *args: logging.getLogger().exception(
sys.excepthook = lambda *args: logging.getLogger(None).exception(
"Uncaught exception", exc_info=args
)
threading.excepthook = lambda args: logging.getLogger().exception(
threading.excepthook = lambda args: logging.getLogger(None).exception(
"Uncaught thread exception",
exc_info=( # type: ignore[arg-type]
args.exc_type,
@@ -612,13 +518,28 @@ async def async_enable_logging(
if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
not err_path_exists and os.access(err_dir, os.W_OK)
):
err_handler = await hass.async_add_executor_job(
_create_log_file, err_log_path, log_rotate_days
err_handler: (
logging.handlers.RotatingFileHandler
| logging.handlers.TimedRotatingFileHandler
)
if log_rotate_days:
err_handler = logging.handlers.TimedRotatingFileHandler(
err_log_path, when="midnight", backupCount=log_rotate_days
)
else:
err_handler = logging.handlers.RotatingFileHandler(
err_log_path, backupCount=1
)
try:
err_handler.doRollover()
except OSError as err:
_LOGGER.error("Error rolling over log file: %s", err)
err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
logger = logging.getLogger()
logger = logging.getLogger("")
logger.addHandler(err_handler)
logger.setLevel(logging.INFO if verbose else logging.WARNING)
@@ -630,41 +551,6 @@ async def async_enable_logging(
async_activate_log_queue_handler(hass)
def _create_log_file(
err_log_path: str, log_rotate_days: int | None
) -> RotatingFileHandler | TimedRotatingFileHandler:
"""Create log file and do roll over."""
err_handler: RotatingFileHandler | TimedRotatingFileHandler
if log_rotate_days:
err_handler = TimedRotatingFileHandler(
err_log_path, when="midnight", backupCount=log_rotate_days
)
else:
err_handler = _RotatingFileHandlerWithoutShouldRollOver(
err_log_path, backupCount=1
)
try:
err_handler.doRollover()
except OSError as err:
_LOGGER.error("Error rolling over log file: %s", err)
return err_handler
class _RotatingFileHandlerWithoutShouldRollOver(RotatingFileHandler):
"""RotatingFileHandler that does not check if it should roll over on every log."""
def shouldRollover(self, record: logging.LogRecord) -> bool:
"""Never roll over.
The shouldRollover check is expensive because it has to stat
the log file for every log record. Since we do not set maxBytes
the result of this check is always False.
"""
return False
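# Illustrative note (not part of the diff): per the docstring above, the stock
# shouldRollover() stats the log file for every record even though maxBytes is never
# set here, so overriding it to a constant False removes that per-record I/O. A hedged
# usage sketch (the path is just an example):
import logging

handler = _RotatingFileHandlerWithoutShouldRollOver("home-assistant.log", backupCount=1)
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
logging.getLogger().addHandler(handler)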
async def async_mount_local_lib_path(config_dir: str) -> str:
"""Add local library to Python Path.
@@ -702,9 +588,7 @@ class _WatchPendingSetups:
"""Periodic log and dispatch of setups that are pending."""
def __init__(
self,
hass: core.HomeAssistant,
setup_started: dict[tuple[str, str | None], float],
self, hass: core.HomeAssistant, setup_started: dict[str, float]
) -> None:
"""Initialize the WatchPendingSetups class."""
self._hass = hass
@@ -719,14 +603,13 @@ class _WatchPendingSetups:
now = monotonic()
self._duration_count += SLOW_STARTUP_CHECK_INTERVAL
remaining_with_setup_started: defaultdict[str, float] = defaultdict(float)
for integration_group, start_time in self._setup_started.items():
domain, _ = integration_group
remaining_with_setup_started[domain] += now - start_time
remaining_with_setup_started = {
domain: (now - start_time)
for domain, start_time in self._setup_started.items()
}
if remaining_with_setup_started:
_LOGGER.debug("Integration remaining: %s", remaining_with_setup_started)
elif waiting_tasks := self._hass._active_tasks: # noqa: SLF001
elif waiting_tasks := self._hass._active_tasks: # pylint: disable=protected-access
_LOGGER.debug("Waiting on tasks: %s", waiting_tasks)
self._async_dispatch(remaining_with_setup_started)
if (
@@ -737,7 +620,7 @@ class _WatchPendingSetups:
# once we take over LOG_SLOW_STARTUP_INTERVAL (60s) to start up
_LOGGER.warning(
"Waiting on integrations to complete setup: %s",
self._setup_started,
", ".join(self._setup_started),
)
_LOGGER.debug("Running timeout Zones: %s", self._hass.timeout.zones)
@@ -746,7 +629,7 @@ class _WatchPendingSetups:
def _async_dispatch(self, remaining_with_setup_started: dict[str, float]) -> None:
"""Dispatch the signal."""
if remaining_with_setup_started or not self._previous_was_empty:
async_dispatcher_send_internal(
async_dispatcher_send(
self._hass, SIGNAL_BOOTSTRAP_INTEGRATIONS, remaining_with_setup_started
)
self._previous_was_empty = not remaining_with_setup_started
@@ -777,18 +660,13 @@ async def async_setup_multi_components(
"""Set up multiple domains. Log on failure."""
# Avoid creating tasks for domains that were setup in a previous stage
domains_not_yet_setup = domains - hass.config.components
# Create setup tasks for base platforms first since everything will have
# to wait to be imported, and the sooner we can get the base platforms
# loaded the sooner we can start loading the rest of the integrations.
futures = {
domain: hass.async_create_task_internal(
domain: hass.async_create_task(
async_setup_component(hass, domain, config),
f"setup component {domain}",
eager_start=True,
)
for domain in sorted(
domains_not_yet_setup, key=SETUP_ORDER_SORT_KEY, reverse=True
)
for domain in domains_not_yet_setup
}
results = await asyncio.gather(*futures.values(), return_exceptions=True)
for idx, domain in enumerate(futures):
@@ -805,53 +683,29 @@ async def _async_resolve_domains_to_setup(
hass: core.HomeAssistant, config: dict[str, Any]
) -> tuple[set[str], dict[str, loader.Integration]]:
"""Resolve all dependencies and return list of domains to set up."""
base_platforms_loaded = False
domains_to_setup = _get_domains(hass, config)
needed_requirements: set[str] = set()
platform_integrations = conf_util.extract_platform_integrations(
config, BASE_PLATFORMS
)
# Ensure base platforms that have platform integrations are added to
# `domains_to_setup` so they can be set up first instead of
# discovering them later when a config entry setup task
# notices it is needed and there is already a long line to use
# the import executor.
#
# For example if we have
# sensor:
# - platform: template
#
# `template` has to be loaded to validate the config for sensor
# so we want to start loading `sensor` as soon as we know
# it will be needed. The more platforms under `sensor:`, the longer
# it will take to finish setup for `sensor` because each of these
# platforms has to be imported before we can validate the config.
#
# Thankfully we are migrating away from the platform pattern
# so this will be less of a problem in the future.
domains_to_setup.update(platform_integrations)
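# Illustration (not part of the diff; the exact shape is an assumption): for a
# configuration such as
#
#   sensor:
#     - platform: template
#     - platform: mqtt
#
# extract_platform_integrations(config, BASE_PLATFORMS) is expected to map each base
# platform to the platform integrations configured under it, e.g.
# {"sensor": {"template", "mqtt"}}, so "sensor", "template" and "mqtt" can all be
# queued for import before any config entry asks for them.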
# Load manifests for base platforms and platform based integrations
# that are defined under base platforms right away since we do not require
# the manifest to list them as dependencies and we want to avoid the lock
# contention when multiple integrations try to load them at once
additional_manifests_to_load = {
*BASE_PLATFORMS,
*chain.from_iterable(platform_integrations.values()),
}
translations_to_load = additional_manifests_to_load.copy()
# Resolve all dependencies so we know all integrations
# that will have to be loaded and start right-away
# that will have to be loaded and start right away
integration_cache: dict[str, loader.Integration] = {}
to_resolve: set[str] = domains_to_setup
while to_resolve or additional_manifests_to_load:
while to_resolve:
old_to_resolve: set[str] = to_resolve
to_resolve = set()
if additional_manifests_to_load:
to_get = {*old_to_resolve, *additional_manifests_to_load}
additional_manifests_to_load.clear()
if not base_platforms_loaded:
# Load base platforms right away since
# we do not require the manifest to list
# them as dependencies and we want
# to avoid the lock contention when multiple
# integrations try to resolve them at once
base_platforms_loaded = True
to_get = {*old_to_resolve, *BASE_PLATFORMS, *platform_integrations}
else:
to_get = old_to_resolve
@@ -864,17 +718,6 @@ async def _async_resolve_domains_to_setup(
continue
integration_cache[domain] = itg
needed_requirements.update(itg.requirements)
# Make sure manifests for dependencies are loaded in the next
# loop so that as many manifest loads as possible are grouped into a single
# call, avoiding the creation of one-off executor jobs later in
# the setup process
additional_manifests_to_load.update(
dep
for dep in chain(itg.dependencies, itg.after_dependencies)
if dep not in integration_cache
)
if domain not in old_to_resolve:
continue
@@ -907,13 +750,7 @@ async def _async_resolve_domains_to_setup(
await asyncio.gather(*resolve_dependencies_tasks)
for itg in integrations_to_process:
try:
all_deps = itg.all_dependencies
except RuntimeError:
# Integration.all_dependencies raises RuntimeError if
# dependencies could not be resolved
continue
for dep in all_deps:
for dep in itg.all_dependencies:
if dep in domains_to_setup:
continue
domains_to_setup.add(dep)
@@ -929,12 +766,6 @@ async def _async_resolve_domains_to_setup(
"check installed requirements",
eager_start=True,
)
#
# Only add the domains_to_setup after we finish resolving
# as new domains are likely to be added in the process
#
translations_to_load.update(domains_to_setup)
# Start loading translations for all integrations we are going to set up
# in the background so they are ready when we need them. This avoids a
# lot of waiting for the translation load lock and a thundering herd of
@@ -946,19 +777,10 @@ async def _async_resolve_domains_to_setup(
# wait for the translation load lock, loading will be done by the
# time it gets to it.
hass.async_create_background_task(
translation.async_load_integrations(hass, translations_to_load),
"load translations",
eager_start=True,
)
# Preload storage for all integrations we are going to set up
# so we do not have to wait for it to be loaded when we need it
# in the setup process.
hass.async_create_background_task(
get_internal_store_manager(hass).async_preload(
[*PRELOAD_STORAGE, *domains_to_setup]
translation.async_load_integrations(
hass, {*BASE_PLATFORMS, *platform_integrations, *domains_to_setup}
),
"preload storage",
"load translations",
eager_start=True,
)
@@ -969,7 +791,11 @@ async def _async_set_up_integrations(
hass: core.HomeAssistant, config: dict[str, Any]
) -> None:
"""Set up all the integrations."""
watcher = _WatchPendingSetups(hass, _setup_started(hass))
setup_started: dict[str, float] = {}
hass.data[DATA_SETUP_STARTED] = setup_started
setup_time: dict[str, timedelta] = hass.data.setdefault(DATA_SETUP_TIME, {})
watcher = _WatchPendingSetups(hass, setup_started)
watcher.async_start()
domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
@@ -980,9 +806,10 @@ async def _async_set_up_integrations(
if "recorder" in domains_to_setup:
recorder.async_initialize_recorder(hass)
pre_stage_domains = [
(name, domains_to_setup & domain_group) for name, domain_group in SETUP_ORDER
]
pre_stage_domains: dict[str, set[str]] = {
name: domains_to_setup & domain_group
for name, domain_group in SETUP_ORDER.items()
}
# calculate what components to setup in what stage
stage_1_domains: set[str] = set()
@@ -1008,18 +835,10 @@ async def _async_set_up_integrations(
stage_2_domains = domains_to_setup - stage_1_domains
for name, domain_group in pre_stage_domains:
for name, domain_group in pre_stage_domains.items():
if domain_group:
stage_2_domains -= domain_group
_LOGGER.info("Setting up %s: %s", name, domain_group)
to_be_loaded = domain_group.copy()
to_be_loaded.update(
dep
for domain in domain_group
if (integration := integration_cache.get(domain)) is not None
for dep in integration.all_dependencies
)
async_set_domains_to_be_loaded(hass, to_be_loaded)
await async_setup_multi_components(hass, domain_group, config)
# Enables after dependencies when setting up stage 1 domains
@@ -1036,7 +855,7 @@ async def _async_set_up_integrations(
except TimeoutError:
_LOGGER.warning(
"Setup timed out for stage 1 waiting on %s - moving forward",
hass._active_tasks, # noqa: SLF001
hass._active_tasks, # pylint: disable=protected-access
)
# Add after dependencies when setting up stage 2 domains
@@ -1052,7 +871,7 @@ async def _async_set_up_integrations(
except TimeoutError:
_LOGGER.warning(
"Setup timed out for stage 2 waiting on %s - moving forward",
hass._active_tasks, # noqa: SLF001
hass._active_tasks, # pylint: disable=protected-access
)
# Wrap up startup
@@ -1063,14 +882,12 @@ async def _async_set_up_integrations(
except TimeoutError:
_LOGGER.warning(
"Setup timed out for bootstrap waiting on %s - moving forward",
hass._active_tasks, # noqa: SLF001
hass._active_tasks, # pylint: disable=protected-access
)
watcher.async_stop()
if _LOGGER.isEnabledFor(logging.DEBUG):
setup_time = async_get_setup_timings(hass)
_LOGGER.debug(
"Integration setup times: %s",
dict(sorted(setup_time.items(), key=itemgetter(1), reverse=True)),
)
_LOGGER.debug(
"Integration setup times: %s",
dict(sorted(setup_time.items(), key=itemgetter(1))),
)


@@ -1,5 +0,0 @@
{
"domain": "ambient_weather",
"name": "Ambient Weather",
"integrations": ["ambient_network", "ambient_station"]
}


@@ -1,5 +0,0 @@
{
"domain": "aqara",
"name": "Aqara",
"iot_standards": ["matter", "zigbee"]
}


@@ -0,0 +1,5 @@
{
"domain": "asterisk",
"name": "Asterisk",
"integrations": ["asterisk_cdr", "asterisk_mbox"]
}


@@ -0,0 +1,5 @@
{
"domain": "epson",
"name": "Epson",
"integrations": ["epson", "epsonworkforce"]
}


@@ -1,5 +1,5 @@
{
"domain": "eq3",
"name": "eQ-3",
"integrations": ["maxcube", "eq3btsmart"]
"integrations": ["maxcube"]
}


@@ -1,5 +0,0 @@
{
"domain": "fujitsu",
"name": "Fujitsu",
"integrations": ["fujitsu_anywair", "fujitsu_fglair"]
}


@@ -9,7 +9,6 @@
"google_generative_ai_conversation",
"google_mail",
"google_maps",
"google_photos",
"google_pubsub",
"google_sheets",
"google_tasks",


@@ -1,5 +1,5 @@
{
"domain": "logitech",
"name": "Logitech",
"integrations": ["harmony", "squeezebox"]
"integrations": ["harmony", "ue_smart_radio", "squeezebox"]
}


@@ -1,5 +0,0 @@
{
"domain": "motionblinds",
"name": "Motionblinds",
"integrations": ["motion_blinds", "motionblinds_ble"]
}


@@ -1,5 +0,0 @@
{
"domain": "roth",
"name": "Roth",
"integrations": ["touchline", "touchline_sl"]
}


@@ -1,5 +0,0 @@
{
"domain": "ruuvi",
"name": "Ruuvi",
"integrations": ["ruuvi_gateway", "ruuvitag_ble"]
}


@@ -1,5 +1,5 @@
{
"domain": "tesla",
"name": "Tesla",
"integrations": ["powerwall", "tesla_wall_connector", "tesla_fleet"]
"integrations": ["powerwall", "tesla_wall_connector"]
}


@@ -1,5 +0,0 @@
{
"domain": "weatherflow",
"name": "WeatherFlow",
"integrations": ["weatherflow", "weatherflow_cloud"]
}


@@ -1,11 +1,5 @@
{
"domain": "yale",
"name": "Yale",
"integrations": [
"august",
"yale_smart_alarm",
"yalexs_ble",
"yale_home",
"yale"
]
"integrations": ["august", "yale_smart_alarm", "yalexs_ble", "yale_home"]
}


@@ -6,3 +6,52 @@ Component design guidelines:
format "<DOMAIN>.<OBJECT_ID>".
- Each component should publish services only under its own domain.
"""
from __future__ import annotations
import logging
from homeassistant.core import HomeAssistant, split_entity_id
from homeassistant.helpers.frame import report
from homeassistant.helpers.group import expand_entity_ids
_LOGGER = logging.getLogger(__name__)
def is_on(hass: HomeAssistant, entity_id: str | None = None) -> bool:
"""Load up the module to call the is_on method.
If there is no entity id given we will check all.
"""
report(
(
"uses homeassistant.components.is_on."
" This is deprecated and will stop working in Home Assistant 2024.9, it"
" should be updated to use the function of the platform directly."
),
error_if_core=True,
)
if entity_id:
entity_ids = expand_entity_ids(hass, [entity_id])
else:
entity_ids = hass.states.entity_ids()
for ent_id in entity_ids:
domain = split_entity_id(ent_id)[0]
try:
component = getattr(hass.components, domain)
except ImportError:
_LOGGER.error("Failed to call %s.is_on: component not found", domain)
continue
if not hasattr(component, "is_on"):
_LOGGER.warning("Integration %s has no is_on method", domain)
continue
if component.is_on(ent_id):
return True
return False
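# Illustrative usage note (not part of the diff): how the deprecated helper above is
# called; "light.kitchen" is just an example entity id.
#
#   is_on(hass, "light.kitchen")   # check one entity (expanded through groups)
#   is_on(hass)                    # check every entity in the state machine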


@@ -4,10 +4,10 @@ from __future__ import annotations
from dataclasses import dataclass, field
from functools import partial
from pathlib import Path
from jaraco.abode.automation import Automation as AbodeAuto
from jaraco.abode.client import Client as Abode
import jaraco.abode.config
from jaraco.abode.devices.base import Device as AbodeDev
from jaraco.abode.exceptions import (
AuthenticationException as AbodeAuthenticationException,
Exception as AbodeException,
@@ -29,11 +29,11 @@ from homeassistant.const import (
)
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, entity
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.typing import ConfigType
from .const import CONF_POLLING, DOMAIN, LOGGER
from .const import ATTRIBUTION, CONF_POLLING, DOMAIN, LOGGER
SERVICE_SETTINGS = "change_setting"
SERVICE_CAPTURE_IMAGE = "capture_image"
@@ -83,21 +83,12 @@ class AbodeSystem:
logout_listener: CALLBACK_TYPE | None = None
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Abode component."""
setup_hass_services(hass)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Abode integration from a config entry."""
username = entry.data[CONF_USERNAME]
password = entry.data[CONF_PASSWORD]
polling = entry.data[CONF_POLLING]
# Configure abode library to use config directory for storing data
jaraco.abode.config.paths.override(user_data=Path(hass.config.path("Abode")))
# For previous config entries where unique_id is None
if entry.unique_id is None:
hass.config_entries.async_update_entry(
@@ -120,6 +111,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
await setup_hass_events(hass)
await hass.async_add_executor_job(setup_hass_services, hass)
await hass.async_add_executor_job(setup_abode_events, hass)
return True
@@ -127,6 +119,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
hass.services.async_remove(DOMAIN, SERVICE_SETTINGS)
hass.services.async_remove(DOMAIN, SERVICE_CAPTURE_IMAGE)
hass.services.async_remove(DOMAIN, SERVICE_TRIGGER_AUTOMATION)
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
await hass.async_add_executor_job(hass.data[DOMAIN].abode.events.stop)
@@ -179,15 +175,15 @@ def setup_hass_services(hass: HomeAssistant) -> None:
signal = f"abode_trigger_automation_{entity_id}"
dispatcher_send(hass, signal)
hass.services.async_register(
hass.services.register(
DOMAIN, SERVICE_SETTINGS, change_setting, schema=CHANGE_SETTING_SCHEMA
)
hass.services.async_register(
hass.services.register(
DOMAIN, SERVICE_CAPTURE_IMAGE, capture_image, schema=CAPTURE_IMAGE_SCHEMA
)
hass.services.async_register(
hass.services.register(
DOMAIN, SERVICE_TRIGGER_AUTOMATION, trigger_automation, schema=AUTOMATION_SCHEMA
)
@@ -251,3 +247,108 @@ def setup_abode_events(hass: HomeAssistant) -> None:
hass.data[DOMAIN].abode.events.add_event_callback(
event, partial(event_callback, event)
)
class AbodeEntity(entity.Entity):
"""Representation of an Abode entity."""
_attr_attribution = ATTRIBUTION
_attr_has_entity_name = True
def __init__(self, data: AbodeSystem) -> None:
"""Initialize Abode entity."""
self._data = data
self._attr_should_poll = data.polling
async def async_added_to_hass(self) -> None:
"""Subscribe to Abode connection status updates."""
await self.hass.async_add_executor_job(
self._data.abode.events.add_connection_status_callback,
self.unique_id,
self._update_connection_status,
)
self.hass.data[DOMAIN].entity_ids.add(self.entity_id)
async def async_will_remove_from_hass(self) -> None:
"""Unsubscribe from Abode connection status updates."""
await self.hass.async_add_executor_job(
self._data.abode.events.remove_connection_status_callback, self.unique_id
)
def _update_connection_status(self) -> None:
"""Update the entity available property."""
self._attr_available = self._data.abode.events.connected
self.schedule_update_ha_state()
class AbodeDevice(AbodeEntity):
"""Representation of an Abode device."""
def __init__(self, data: AbodeSystem, device: AbodeDev) -> None:
"""Initialize Abode device."""
super().__init__(data)
self._device = device
self._attr_unique_id = device.uuid
async def async_added_to_hass(self) -> None:
"""Subscribe to device events."""
await super().async_added_to_hass()
await self.hass.async_add_executor_job(
self._data.abode.events.add_device_callback,
self._device.id,
self._update_callback,
)
async def async_will_remove_from_hass(self) -> None:
"""Unsubscribe from device events."""
await super().async_will_remove_from_hass()
await self.hass.async_add_executor_job(
self._data.abode.events.remove_all_device_callbacks, self._device.id
)
def update(self) -> None:
"""Update device state."""
self._device.refresh()
@property
def extra_state_attributes(self) -> dict[str, str]:
"""Return the state attributes."""
return {
"device_id": self._device.id,
"battery_low": self._device.battery_low,
"no_response": self._device.no_response,
"device_type": self._device.type,
}
@property
def device_info(self) -> DeviceInfo:
"""Return device registry information for this entity."""
return DeviceInfo(
identifiers={(DOMAIN, self._device.id)},
manufacturer="Abode",
model=self._device.type,
name=self._device.name,
)
def _update_callback(self, device: AbodeDev) -> None:
"""Update the device state."""
self.schedule_update_ha_state()
class AbodeAutomation(AbodeEntity):
"""Representation of an Abode automation."""
def __init__(self, data: AbodeSystem, automation: AbodeAuto) -> None:
"""Initialize for Abode automation."""
super().__init__(data)
self._automation = automation
self._attr_name = automation.name
self._attr_unique_id = automation.automation_id
self._attr_extra_state_attributes = {
"type": "CUE automation",
}
def update(self) -> None:
"""Update automation state."""
self._automation.refresh()


@@ -17,9 +17,8 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import AbodeSystem
from . import AbodeDevice, AbodeSystem
from .const import DOMAIN
from .entity import AbodeDevice
async def async_setup_entry(


@@ -4,7 +4,14 @@ from __future__ import annotations
from typing import cast
from jaraco.abode.devices.binary_sensor import BinarySensor
from jaraco.abode.devices.sensor import BinarySensor
from jaraco.abode.helpers.constants import (
TYPE_CONNECTIVITY,
TYPE_MOISTURE,
TYPE_MOTION,
TYPE_OCCUPANCY,
TYPE_OPENING,
)
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
@@ -15,9 +22,8 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util.enum import try_parse_enum
from . import AbodeSystem
from . import AbodeDevice, AbodeSystem
from .const import DOMAIN
from .entity import AbodeDevice
async def async_setup_entry(
@@ -27,11 +33,11 @@ async def async_setup_entry(
data: AbodeSystem = hass.data[DOMAIN]
device_types = [
"connectivity",
"moisture",
"motion",
"occupancy",
"door",
TYPE_CONNECTIVITY,
TYPE_MOISTURE,
TYPE_MOTION,
TYPE_OCCUPANCY,
TYPE_OPENING,
]
async_add_entities(


@@ -8,6 +8,7 @@ from typing import Any, cast
from jaraco.abode.devices.base import Device
from jaraco.abode.devices.camera import Camera as AbodeCam
from jaraco.abode.helpers import timeline
from jaraco.abode.helpers.constants import TYPE_CAMERA
import requests
from requests.models import Response
@@ -18,9 +19,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import Throttle
from . import AbodeSystem
from . import AbodeDevice, AbodeSystem
from .const import DOMAIN, LOGGER
from .entity import AbodeDevice
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=90)
@@ -33,7 +33,7 @@ async def async_setup_entry(
async_add_entities(
AbodeCamera(data, device, timeline.CAPTURE_IMAGE)
for device in data.abode.get_devices(generic_type="camera")
for device in data.abode.get_devices(generic_type=TYPE_CAMERA)
)


@@ -3,15 +3,15 @@
from typing import Any
from jaraco.abode.devices.cover import Cover
from jaraco.abode.helpers.constants import TYPE_COVER
from homeassistant.components.cover import CoverEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import AbodeSystem
from . import AbodeDevice, AbodeSystem
from .const import DOMAIN
from .entity import AbodeDevice
async def async_setup_entry(
@@ -22,7 +22,7 @@ async def async_setup_entry(
async_add_entities(
AbodeCover(data, device)
for device in data.abode.get_devices(generic_type="cover")
for device in data.abode.get_devices(generic_type=TYPE_COVER)
)


@@ -1,115 +0,0 @@
"""Support for Abode Security System entities."""
from jaraco.abode.automation import Automation as AbodeAuto
from jaraco.abode.devices.base import Device as AbodeDev
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity
from . import AbodeSystem
from .const import ATTRIBUTION, DOMAIN
class AbodeEntity(Entity):
"""Representation of an Abode entity."""
_attr_attribution = ATTRIBUTION
_attr_has_entity_name = True
def __init__(self, data: AbodeSystem) -> None:
"""Initialize Abode entity."""
self._data = data
self._attr_should_poll = data.polling
async def async_added_to_hass(self) -> None:
"""Subscribe to Abode connection status updates."""
await self.hass.async_add_executor_job(
self._data.abode.events.add_connection_status_callback,
self.unique_id,
self._update_connection_status,
)
self.hass.data[DOMAIN].entity_ids.add(self.entity_id)
async def async_will_remove_from_hass(self) -> None:
"""Unsubscribe from Abode connection status updates."""
await self.hass.async_add_executor_job(
self._data.abode.events.remove_connection_status_callback, self.unique_id
)
def _update_connection_status(self) -> None:
"""Update the entity available property."""
self._attr_available = self._data.abode.events.connected
self.schedule_update_ha_state()
class AbodeDevice(AbodeEntity):
"""Representation of an Abode device."""
def __init__(self, data: AbodeSystem, device: AbodeDev) -> None:
"""Initialize Abode device."""
super().__init__(data)
self._device = device
self._attr_unique_id = device.uuid
async def async_added_to_hass(self) -> None:
"""Subscribe to device events."""
await super().async_added_to_hass()
await self.hass.async_add_executor_job(
self._data.abode.events.add_device_callback,
self._device.id,
self._update_callback,
)
async def async_will_remove_from_hass(self) -> None:
"""Unsubscribe from device events."""
await super().async_will_remove_from_hass()
await self.hass.async_add_executor_job(
self._data.abode.events.remove_all_device_callbacks, self._device.id
)
def update(self) -> None:
"""Update device state."""
self._device.refresh()
@property
def extra_state_attributes(self) -> dict[str, str]:
"""Return the state attributes."""
return {
"device_id": self._device.id,
"battery_low": self._device.battery_low,
"no_response": self._device.no_response,
"device_type": self._device.type,
}
@property
def device_info(self) -> DeviceInfo:
"""Return device registry information for this entity."""
return DeviceInfo(
identifiers={(DOMAIN, self._device.id)},
manufacturer="Abode",
model=self._device.type,
name=self._device.name,
)
def _update_callback(self, device: AbodeDev) -> None:
"""Update the device state."""
self.schedule_update_ha_state()
class AbodeAutomation(AbodeEntity):
"""Representation of an Abode automation."""
def __init__(self, data: AbodeSystem, automation: AbodeAuto) -> None:
"""Initialize for Abode automation."""
super().__init__(data)
self._automation = automation
self._attr_name = automation.name
self._attr_unique_id = automation.id
self._attr_extra_state_attributes = {
"type": "CUE automation",
}
def update(self) -> None:
"""Update automation state."""
self._automation.refresh()


@@ -5,16 +5,5 @@
"default": "mdi:robot"
}
}
},
"services": {
"capture_image": {
"service": "mdi:camera"
},
"change_setting": {
"service": "mdi:cog"
},
"trigger_automation": {
"service": "mdi:play"
}
}
}


@@ -6,6 +6,7 @@ from math import ceil
from typing import Any
from jaraco.abode.devices.light import Light
from jaraco.abode.helpers.constants import TYPE_LIGHT
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
@@ -22,9 +23,8 @@ from homeassistant.util.color import (
color_temperature_mired_to_kelvin,
)
from . import AbodeSystem
from . import AbodeDevice, AbodeSystem
from .const import DOMAIN
from .entity import AbodeDevice
async def async_setup_entry(
@@ -35,7 +35,7 @@ async def async_setup_entry(
async_add_entities(
AbodeLight(data, device)
for device in data.abode.get_devices(generic_type="light")
for device in data.abode.get_devices(generic_type=TYPE_LIGHT)
)


@@ -3,15 +3,15 @@
from typing import Any
from jaraco.abode.devices.lock import Lock
from jaraco.abode.helpers.constants import TYPE_LOCK
from homeassistant.components.lock import LockEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import AbodeSystem
from . import AbodeDevice, AbodeSystem
from .const import DOMAIN
from .entity import AbodeDevice
async def async_setup_entry(
@@ -22,7 +22,7 @@ async def async_setup_entry(
async_add_entities(
AbodeLock(data, device)
for device in data.abode.get_devices(generic_type="lock")
for device in data.abode.get_devices(generic_type=TYPE_LOCK)
)


@@ -9,5 +9,5 @@
},
"iot_class": "cloud_push",
"loggers": ["jaraco.abode", "lomond"],
"requirements": ["jaraco.abode==6.2.1"]
"requirements": ["jaraco.abode==3.3.0", "jaraco.functools==3.9.0"]
}


@@ -7,6 +7,15 @@ from dataclasses import dataclass
from typing import cast
from jaraco.abode.devices.sensor import Sensor
from jaraco.abode.helpers.constants import (
HUMI_STATUS_KEY,
LUX_STATUS_KEY,
STATUSES_KEY,
TEMP_STATUS_KEY,
TYPE_SENSOR,
UNIT_CELSIUS,
UNIT_FAHRENHEIT,
)
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -18,13 +27,12 @@ from homeassistant.const import LIGHT_LUX, PERCENTAGE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import AbodeSystem
from . import AbodeDevice, AbodeSystem
from .const import DOMAIN
from .entity import AbodeDevice
ABODE_TEMPERATURE_UNIT_HA_UNIT = {
"°F": UnitOfTemperature.FAHRENHEIT,
"°C": UnitOfTemperature.CELSIUS,
UNIT_FAHRENHEIT: UnitOfTemperature.FAHRENHEIT,
UNIT_CELSIUS: UnitOfTemperature.CELSIUS,
}
@@ -38,7 +46,7 @@ class AbodeSensorDescription(SensorEntityDescription):
SENSOR_TYPES: tuple[AbodeSensorDescription, ...] = (
AbodeSensorDescription(
key="temperature",
key=TEMP_STATUS_KEY,
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement_fn=lambda device: ABODE_TEMPERATURE_UNIT_HA_UNIT[
device.temp_unit
@@ -46,13 +54,13 @@ SENSOR_TYPES: tuple[AbodeSensorDescription, ...] = (
value_fn=lambda device: cast(float, device.temp),
),
AbodeSensorDescription(
key="humidity",
key=HUMI_STATUS_KEY,
device_class=SensorDeviceClass.HUMIDITY,
native_unit_of_measurement_fn=lambda _: PERCENTAGE,
value_fn=lambda device: cast(float, device.humidity),
),
AbodeSensorDescription(
key="lux",
key=LUX_STATUS_KEY,
device_class=SensorDeviceClass.ILLUMINANCE,
native_unit_of_measurement_fn=lambda _: LIGHT_LUX,
value_fn=lambda device: cast(float, device.lux),
@@ -69,8 +77,8 @@ async def async_setup_entry(
async_add_entities(
AbodeSensor(data, device, description)
for description in SENSOR_TYPES
for device in data.abode.get_devices(generic_type="sensor")
if description.key in device.get_value("statuses")
for device in data.abode.get_devices(generic_type=TYPE_SENSOR)
if description.key in device.get_value(STATUSES_KEY)
)
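A small sketch of the filtering step above, using hypothetical device data rather than real Abode API output: a sensor entity is only created for a description whose key appears in the statuses the device actually reports.

    # Hypothetical description/device data illustrating the "key in statuses" filter.
    from dataclasses import dataclass
    from typing import Any, Callable

    @dataclass(frozen=True)
    class SketchSensorDescription:
        key: str
        value_fn: Callable[[dict[str, Any]], Any]

    DESCRIPTIONS = (
        SketchSensorDescription(key="temperature", value_fn=lambda d: d["temp"]),
        SketchSensorDescription(key="humidity", value_fn=lambda d: d["humidity"]),
        SketchSensorDescription(key="lux", value_fn=lambda d: d["lux"]),
    )

    device = {"statuses": {"temperature": 21.5, "lux": 120}, "temp": 21.5, "lux": 120}

    # Only temperature and lux are reported, so humidity gets no entity.
    selected = [d for d in DESCRIPTIONS if d.key in device["statuses"]]
    assert [d.key for d in selected] == ["temperature", "lux"]
    assert selected[0].value_fn(device) == 21.5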

View File

@@ -5,6 +5,7 @@ from __future__ import annotations
from typing import Any, cast
from jaraco.abode.devices.switch import Switch
from jaraco.abode.helpers.constants import TYPE_SWITCH, TYPE_VALVE
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
@@ -12,11 +13,10 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import AbodeSystem
from . import AbodeAutomation, AbodeDevice, AbodeSystem
from .const import DOMAIN
from .entity import AbodeAutomation, AbodeDevice
DEVICE_TYPES = ["switch", "valve"]
DEVICE_TYPES = [TYPE_SWITCH, TYPE_VALVE]
async def async_setup_entry(
@@ -88,4 +88,4 @@ class AbodeAutomationSwitch(AbodeAutomation, SwitchEntity):
@property
def is_on(self) -> bool:
"""Return True if the automation is enabled."""
return bool(self._automation.enabled)
return bool(self._automation.is_enabled)

View File

@@ -2,10 +2,14 @@
from __future__ import annotations
from dataclasses import dataclass
from asyncio import timeout
from datetime import timedelta
import logging
from typing import Any
from accuweather import AccuWeather
from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError
from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientConnectorError
from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM
from homeassistant.config_entries import ConfigEntry
@@ -13,71 +17,43 @@ from homeassistant.const import CONF_API_KEY, CONF_NAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION
from .coordinator import (
AccuWeatherDailyForecastDataUpdateCoordinator,
AccuWeatherObservationDataUpdateCoordinator,
)
from .const import ATTR_FORECAST, CONF_FORECAST, DOMAIN, MANUFACTURER
_LOGGER = logging.getLogger(__name__)
PLATFORMS = [Platform.SENSOR, Platform.WEATHER]
@dataclass
class AccuWeatherData:
"""Data for AccuWeather integration."""
coordinator_observation: AccuWeatherObservationDataUpdateCoordinator
coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator
type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData]
async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up AccuWeather as config entry."""
api_key: str = entry.data[CONF_API_KEY]
name: str = entry.data[CONF_NAME]
assert entry.unique_id is not None
location_key = entry.unique_id
forecast: bool = entry.options.get(CONF_FORECAST, False)
_LOGGER.debug("Using location_key: %s", location_key)
_LOGGER.debug("Using location_key: %s, get forecast: %s", location_key, forecast)
websession = async_get_clientsession(hass)
accuweather = AccuWeather(api_key, websession, location_key=location_key)
coordinator_observation = AccuWeatherObservationDataUpdateCoordinator(
hass,
accuweather,
name,
"observation",
UPDATE_INTERVAL_OBSERVATION,
coordinator = AccuWeatherDataUpdateCoordinator(
hass, websession, api_key, location_key, forecast, name
)
await coordinator.async_config_entry_first_refresh()
coordinator_daily_forecast = AccuWeatherDailyForecastDataUpdateCoordinator(
hass,
accuweather,
name,
"daily forecast",
UPDATE_INTERVAL_DAILY_FORECAST,
)
entry.async_on_unload(entry.add_update_listener(update_listener))
await coordinator_observation.async_config_entry_first_refresh()
await coordinator_daily_forecast.async_config_entry_first_refresh()
entry.runtime_data = AccuWeatherData(
coordinator_observation=coordinator_observation,
coordinator_daily_forecast=coordinator_daily_forecast,
)
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
# Remove ozone sensors from registry if they exist
ent_reg = er.async_get(hass)
for day in range(5):
unique_id = f"{location_key}-ozone-{day}"
for day in range(0, 5):
unique_id = f"{coordinator.location_key}-ozone-{day}"
if entity_id := ent_reg.async_get_entity_id(SENSOR_PLATFORM, DOMAIN, unique_id):
_LOGGER.debug("Removing ozone sensor entity %s", entity_id)
ent_reg.async_remove(entity_id)
@@ -85,8 +61,78 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry)
return True
async def async_unload_entry(
hass: HomeAssistant, entry: AccuWeatherConfigEntry
) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener."""
await hass.config_entries.async_reload(entry.entry_id)
class AccuWeatherDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): # pylint: disable=hass-enforce-coordinator-module
"""Class to manage fetching AccuWeather data API."""
def __init__(
self,
hass: HomeAssistant,
session: ClientSession,
api_key: str,
location_key: str,
forecast: bool,
name: str,
) -> None:
"""Initialize."""
self.location_key = location_key
self.forecast = forecast
self.accuweather = AccuWeather(api_key, session, location_key=location_key)
self.device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, location_key)},
manufacturer=MANUFACTURER,
name=name,
# You don't need to provide specific details for the URL,
# so passing in _ characters is fine if the location key
# is correct
configuration_url=(
"http://accuweather.com/en/"
f"_/_/{location_key}/"
f"weather-forecast/{location_key}/"
),
)
# Enabling the forecast download increases the number of requests per data
# update. We use a 40-minute interval for current conditions only and an
# 80-minute interval for current conditions plus forecast, so we do not
# exceed the allowed number of requests. We are allowed 50 requests per day,
# so we use 36 and keep 14 in reserve for restarting HA.
update_interval = timedelta(minutes=40)
if self.forecast:
update_interval *= 2
_LOGGER.debug("Data will be update every %s", update_interval)
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=update_interval)
async def _async_update_data(self) -> dict[str, Any]:
"""Update data via library."""
forecast: list[dict[str, Any]] = []
try:
async with timeout(10):
current = await self.accuweather.async_get_current_conditions()
if self.forecast:
forecast = await self.accuweather.async_get_daily_forecast()
except (
ApiError,
ClientConnectorError,
InvalidApiKeyError,
RequestsExceededError,
) as error:
raise UpdateFailed(error) from error
_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
return {**current, **{ATTR_FORECAST: forecast}}
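A worked example of the request-budget arithmetic in the comment above; the constants and helper below are illustrative, not part of the integration.

    # 50 requests allowed per day, 14 kept in reserve for restarts.
    from datetime import timedelta

    DAILY_REQUEST_LIMIT = 50
    RESERVE = 14
    BASE_INTERVAL = timedelta(minutes=40)

    def planned_requests_per_day(forecast: bool) -> int:
        # Doubling the interval when forecast is enabled keeps the daily
        # request count constant, because each refresh then costs two calls.
        interval = BASE_INTERVAL * 2 if forecast else BASE_INTERVAL
        updates_per_day = timedelta(days=1) // interval
        requests_per_update = 2 if forecast else 1
        return updates_per_day * requests_per_update

    assert planned_requests_per_day(forecast=False) == 36  # 1440 min / 40 min = 36 calls
    assert planned_requests_per_day(forecast=True) == 36   # 1440 min / 80 min = 18 updates * 2 calls
    assert planned_requests_per_day(True) <= DAILY_REQUEST_LIMIT - RESERVE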

View File

@@ -10,12 +10,26 @@ from aiohttp import ClientError
from aiohttp.client_exceptions import ClientConnectorError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.schema_config_entry_flow import (
SchemaFlowFormStep,
SchemaOptionsFlowHandler,
)
from .const import DOMAIN
from .const import CONF_FORECAST, DOMAIN
OPTIONS_SCHEMA = vol.Schema(
{
vol.Optional(CONF_FORECAST, default=False): bool,
}
)
OPTIONS_FLOW = {
"init": SchemaFlowFormStep(OPTIONS_SCHEMA),
}
class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
@@ -73,3 +87,9 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
),
errors=errors,
)
@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> SchemaOptionsFlowHandler:
"""Options callback for AccuWeather."""
return SchemaOptionsFlowHandler(config_entry, OPTIONS_FLOW)
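A minimal sketch of what the options schema above accepts, exercising the voluptuous schema directly rather than the flow handler:

    import voluptuous as vol

    CONF_FORECAST = "forecast"
    OPTIONS_SCHEMA = vol.Schema({vol.Optional(CONF_FORECAST, default=False): bool})

    # The default is filled in when the option is absent; an explicit value wins.
    assert OPTIONS_SCHEMA({}) == {CONF_FORECAST: False}
    assert OPTIONS_SCHEMA({CONF_FORECAST: True}) == {CONF_FORECAST: True}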

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from datetime import timedelta
from typing import Final
from homeassistant.components.weather import (
@@ -28,8 +27,10 @@ ATTR_CATEGORY: Final = "Category"
ATTR_DIRECTION: Final = "Direction"
ATTR_ENGLISH: Final = "English"
ATTR_LEVEL: Final = "level"
ATTR_FORECAST: Final = "forecast"
ATTR_SPEED: Final = "Speed"
ATTR_VALUE: Final = "Value"
CONF_FORECAST: Final = "forecast"
DOMAIN: Final = "accuweather"
MANUFACTURER: Final = "AccuWeather, Inc."
MAX_FORECAST_DAYS: Final = 4
@@ -55,5 +56,3 @@ CONDITION_MAP = {
for cond_ha, cond_codes in CONDITION_CLASSES.items()
for cond_code in cond_codes
}
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)

View File

@@ -1,124 +0,0 @@
"""The AccuWeather coordinator."""
from asyncio import timeout
from datetime import timedelta
import logging
from typing import TYPE_CHECKING, Any
from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError
from aiohttp.client_exceptions import ClientConnectorError
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.update_coordinator import (
DataUpdateCoordinator,
TimestampDataUpdateCoordinator,
UpdateFailed,
)
from .const import DOMAIN, MANUFACTURER
EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceededError)
_LOGGER = logging.getLogger(__name__)
class AccuWeatherObservationDataUpdateCoordinator(
DataUpdateCoordinator[dict[str, Any]]
):
"""Class to manage fetching AccuWeather data API."""
def __init__(
self,
hass: HomeAssistant,
accuweather: AccuWeather,
name: str,
coordinator_type: str,
update_interval: timedelta,
) -> None:
"""Initialize."""
self.accuweather = accuweather
self.location_key = accuweather.location_key
if TYPE_CHECKING:
assert self.location_key is not None
self.device_info = _get_device_info(self.location_key, name)
super().__init__(
hass,
_LOGGER,
name=f"{name} ({coordinator_type})",
update_interval=update_interval,
)
async def _async_update_data(self) -> dict[str, Any]:
"""Update data via library."""
try:
async with timeout(10):
result = await self.accuweather.async_get_current_conditions()
except EXCEPTIONS as error:
raise UpdateFailed(error) from error
_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
return result
class AccuWeatherDailyForecastDataUpdateCoordinator(
TimestampDataUpdateCoordinator[list[dict[str, Any]]]
):
"""Class to manage fetching AccuWeather data API."""
def __init__(
self,
hass: HomeAssistant,
accuweather: AccuWeather,
name: str,
coordinator_type: str,
update_interval: timedelta,
) -> None:
"""Initialize."""
self.accuweather = accuweather
self.location_key = accuweather.location_key
if TYPE_CHECKING:
assert self.location_key is not None
self.device_info = _get_device_info(self.location_key, name)
super().__init__(
hass,
_LOGGER,
name=f"{name} ({coordinator_type})",
update_interval=update_interval,
)
async def _async_update_data(self) -> list[dict[str, Any]]:
"""Update data via library."""
try:
async with timeout(10):
result = await self.accuweather.async_get_daily_forecast()
except EXCEPTIONS as error:
raise UpdateFailed(error) from error
_LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
return result
def _get_device_info(location_key: str, name: str) -> DeviceInfo:
"""Get device info."""
return DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, location_key)},
manufacturer=MANUFACTURER,
name=name,
# You don't need to provide specific details for the URL,
# so passing in _ characters is fine if the location key
# is correct
configuration_url=(
"http://accuweather.com/en/"
f"_/_/{location_key}/weather-forecast/{location_key}/"
),
)

View File

@@ -5,21 +5,27 @@ from __future__ import annotations
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE
from homeassistant.core import HomeAssistant
from . import AccuWeatherConfigEntry, AccuWeatherData
from . import AccuWeatherDataUpdateCoordinator
from .const import DOMAIN
TO_REDACT = {CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE}
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: AccuWeatherConfigEntry
hass: HomeAssistant, config_entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
accuweather_data: AccuWeatherData = config_entry.runtime_data
coordinator: AccuWeatherDataUpdateCoordinator = hass.data[DOMAIN][
config_entry.entry_id
]
return {
diagnostics_data = {
"config_entry_data": async_redact_data(dict(config_entry.data), TO_REDACT),
"observation_data": accuweather_data.coordinator_observation.data,
"coordinator_data": coordinator.data,
}
return diagnostics_data
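A hand-rolled sketch of the redaction step above; the redact helper and the "**REDACTED**" placeholder are illustrative and do not reproduce the real async_redact_data helper.

    # Hypothetical entry data; only the keys listed in TO_REDACT are masked.
    TO_REDACT = {"api_key", "latitude", "longitude"}

    def redact(data: dict, to_redact: set[str]) -> dict:
        return {k: ("**REDACTED**" if k in to_redact else v) for k, v in data.items()}

    entry_data = {"api_key": "abc123", "latitude": 52.2, "longitude": 21.0, "name": "Home"}
    assert redact(entry_data, TO_REDACT)["api_key"] == "**REDACTED**"
    assert redact(entry_data, TO_REDACT)["name"] == "Home"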

View File

@@ -1,51 +0,0 @@
{
"entity": {
"sensor": {
"cloud_ceiling": {
"default": "mdi:weather-fog"
},
"cloud_cover": {
"default": "mdi:weather-cloudy"
},
"cloud_cover_day": {
"default": "mdi:weather-cloudy"
},
"cloud_cover_night": {
"default": "mdi:weather-cloudy"
},
"grass_pollen": {
"default": "mdi:grass"
},
"hours_of_sun": {
"default": "mdi:weather-partly-cloudy"
},
"mold_pollen": {
"default": "mdi:blur"
},
"pressure_tendency": {
"default": "mdi:gauge"
},
"ragweed_pollen": {
"default": "mdi:sprout"
},
"thunderstorm_probability_day": {
"default": "mdi:weather-lightning"
},
"thunderstorm_probability_night": {
"default": "mdi:weather-lightning"
},
"translation_key": {
"default": "mdi:air-filter"
},
"tree_pollen": {
"default": "mdi:tree-outline"
},
"uv_index": {
"default": "mdi:weather-sunny"
},
"uv_index_forecast": {
"default": "mdi:weather-sunny"
}
}
}
}

View File

@@ -8,6 +8,6 @@
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"quality_scale": "platinum",
"requirements": ["accuweather==3.0.0"],
"requirements": ["accuweather==2.1.1"],
"single_config_entry": true
}

View File

@@ -12,13 +12,13 @@ from homeassistant.components.sensor import (
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONCENTRATION_PARTS_PER_CUBIC_METER,
PERCENTAGE,
UV_INDEX,
UnitOfIrradiance,
UnitOfLength,
UnitOfPressure,
UnitOfSpeed,
UnitOfTemperature,
UnitOfTime,
@@ -28,22 +28,20 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import AccuWeatherConfigEntry
from . import AccuWeatherDataUpdateCoordinator
from .const import (
API_METRIC,
ATTR_CATEGORY,
ATTR_DIRECTION,
ATTR_ENGLISH,
ATTR_FORECAST,
ATTR_LEVEL,
ATTR_SPEED,
ATTR_VALUE,
ATTRIBUTION,
DOMAIN,
MAX_FORECAST_DAYS,
)
from .coordinator import (
AccuWeatherDailyForecastDataUpdateCoordinator,
AccuWeatherObservationDataUpdateCoordinator,
)
PARALLEL_UPDATES = 1
@@ -54,176 +52,280 @@ class AccuWeatherSensorDescription(SensorEntityDescription):
value_fn: Callable[[dict[str, Any]], str | int | float | None]
attr_fn: Callable[[dict[str, Any]], dict[str, Any]] = lambda _: {}
day: int | None = None
FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
AccuWeatherSensorDescription(
key="AirQuality",
value_fn=lambda data: cast(str, data[ATTR_CATEGORY]),
device_class=SensorDeviceClass.ENUM,
options=["good", "hazardous", "high", "low", "moderate", "unhealthy"],
translation_key="air_quality",
*(
AccuWeatherSensorDescription(
key="AirQuality",
icon="mdi:air-filter",
value_fn=lambda data: cast(str, data[ATTR_CATEGORY]),
device_class=SensorDeviceClass.ENUM,
options=["good", "hazardous", "high", "low", "moderate", "unhealthy"],
translation_key=f"air_quality_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="CloudCoverDay",
entity_registry_enabled_default=False,
native_unit_of_measurement=PERCENTAGE,
value_fn=lambda data: cast(int, data),
translation_key="cloud_cover_day",
*(
AccuWeatherSensorDescription(
key="CloudCoverDay",
icon="mdi:weather-cloudy",
entity_registry_enabled_default=False,
native_unit_of_measurement=PERCENTAGE,
value_fn=lambda data: cast(int, data),
translation_key=f"cloud_cover_day_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="CloudCoverNight",
entity_registry_enabled_default=False,
native_unit_of_measurement=PERCENTAGE,
value_fn=lambda data: cast(int, data),
translation_key="cloud_cover_night",
*(
AccuWeatherSensorDescription(
key="CloudCoverNight",
icon="mdi:weather-cloudy",
entity_registry_enabled_default=False,
native_unit_of_measurement=PERCENTAGE,
value_fn=lambda data: cast(int, data),
translation_key=f"cloud_cover_night_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="Grass",
entity_registry_enabled_default=False,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key="grass_pollen",
*(
AccuWeatherSensorDescription(
key="Grass",
icon="mdi:grass",
entity_registry_enabled_default=False,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key=f"grass_pollen_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="HoursOfSun",
native_unit_of_measurement=UnitOfTime.HOURS,
value_fn=lambda data: cast(float, data),
translation_key="hours_of_sun",
*(
AccuWeatherSensorDescription(
key="HoursOfSun",
icon="mdi:weather-partly-cloudy",
native_unit_of_measurement=UnitOfTime.HOURS,
value_fn=lambda data: cast(float, data),
translation_key=f"hours_of_sun_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="LongPhraseDay",
value_fn=lambda data: cast(str, data),
translation_key="condition_day",
*(
AccuWeatherSensorDescription(
key="LongPhraseDay",
value_fn=lambda data: cast(str, data),
translation_key=f"condition_day_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="LongPhraseNight",
value_fn=lambda data: cast(str, data),
translation_key="condition_night",
*(
AccuWeatherSensorDescription(
key="LongPhraseNight",
value_fn=lambda data: cast(str, data),
translation_key=f"condition_night_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="Mold",
entity_registry_enabled_default=False,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key="mold_pollen",
*(
AccuWeatherSensorDescription(
key="Mold",
icon="mdi:blur",
entity_registry_enabled_default=False,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key=f"mold_pollen_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="Ragweed",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
entity_registry_enabled_default=False,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key="ragweed_pollen",
*(
AccuWeatherSensorDescription(
key="Ragweed",
icon="mdi:sprout",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
entity_registry_enabled_default=False,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key=f"ragweed_pollen_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="RealFeelTemperatureMax",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
translation_key="realfeel_temperature_max",
*(
AccuWeatherSensorDescription(
key="RealFeelTemperatureMax",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
translation_key=f"realfeel_temperature_max_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="RealFeelTemperatureMin",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
translation_key="realfeel_temperature_min",
*(
AccuWeatherSensorDescription(
key="RealFeelTemperatureMin",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
translation_key=f"realfeel_temperature_min_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="RealFeelTemperatureShadeMax",
device_class=SensorDeviceClass.TEMPERATURE,
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
translation_key="realfeel_temperature_shade_max",
*(
AccuWeatherSensorDescription(
key="RealFeelTemperatureShadeMax",
device_class=SensorDeviceClass.TEMPERATURE,
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
translation_key=f"realfeel_temperature_shade_max_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="RealFeelTemperatureShadeMin",
device_class=SensorDeviceClass.TEMPERATURE,
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
translation_key="realfeel_temperature_shade_min",
*(
AccuWeatherSensorDescription(
key="RealFeelTemperatureShadeMin",
device_class=SensorDeviceClass.TEMPERATURE,
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
translation_key=f"realfeel_temperature_shade_min_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="SolarIrradianceDay",
device_class=SensorDeviceClass.IRRADIANCE,
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfIrradiance.WATTS_PER_SQUARE_METER,
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
translation_key="solar_irradiance_day",
*(
AccuWeatherSensorDescription(
key="SolarIrradianceDay",
icon="mdi:weather-sunny",
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfIrradiance.WATTS_PER_SQUARE_METER,
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
translation_key=f"solar_irradiance_day_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="SolarIrradianceNight",
device_class=SensorDeviceClass.IRRADIANCE,
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfIrradiance.WATTS_PER_SQUARE_METER,
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
translation_key="solar_irradiance_night",
*(
AccuWeatherSensorDescription(
key="SolarIrradianceNight",
icon="mdi:weather-sunny",
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfIrradiance.WATTS_PER_SQUARE_METER,
value_fn=lambda data: cast(float, data[ATTR_VALUE]),
translation_key=f"solar_irradiance_night_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="ThunderstormProbabilityDay",
native_unit_of_measurement=PERCENTAGE,
value_fn=lambda data: cast(int, data),
translation_key="thunderstorm_probability_day",
*(
AccuWeatherSensorDescription(
key="ThunderstormProbabilityDay",
icon="mdi:weather-lightning",
native_unit_of_measurement=PERCENTAGE,
value_fn=lambda data: cast(int, data),
translation_key=f"thunderstorm_probability_day_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="ThunderstormProbabilityNight",
native_unit_of_measurement=PERCENTAGE,
value_fn=lambda data: cast(int, data),
translation_key="thunderstorm_probability_night",
*(
AccuWeatherSensorDescription(
key="ThunderstormProbabilityNight",
icon="mdi:weather-lightning",
native_unit_of_measurement=PERCENTAGE,
value_fn=lambda data: cast(int, data),
translation_key=f"thunderstorm_probability_night_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="Tree",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
entity_registry_enabled_default=False,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key="tree_pollen",
*(
AccuWeatherSensorDescription(
key="Tree",
icon="mdi:tree-outline",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
entity_registry_enabled_default=False,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key=f"tree_pollen_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="UVIndex",
native_unit_of_measurement=UV_INDEX,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key="uv_index_forecast",
*(
AccuWeatherSensorDescription(
key="UVIndex",
icon="mdi:weather-sunny",
native_unit_of_measurement=UV_INDEX,
value_fn=lambda data: cast(int, data[ATTR_VALUE]),
attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
translation_key=f"uv_index_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="WindGustDay",
device_class=SensorDeviceClass.WIND_SPEED,
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
translation_key="wind_gust_speed_day",
*(
AccuWeatherSensorDescription(
key="WindGustDay",
device_class=SensorDeviceClass.WIND_SPEED,
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
translation_key=f"wind_gust_speed_day_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="WindGustNight",
device_class=SensorDeviceClass.WIND_SPEED,
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
translation_key="wind_gust_speed_night",
*(
AccuWeatherSensorDescription(
key="WindGustNight",
device_class=SensorDeviceClass.WIND_SPEED,
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
translation_key=f"wind_gust_speed_night_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="WindDay",
device_class=SensorDeviceClass.WIND_SPEED,
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
translation_key="wind_speed_day",
*(
AccuWeatherSensorDescription(
key="WindDay",
device_class=SensorDeviceClass.WIND_SPEED,
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
translation_key=f"wind_speed_day_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
AccuWeatherSensorDescription(
key="WindNight",
device_class=SensorDeviceClass.WIND_SPEED,
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
translation_key="wind_speed_night",
*(
AccuWeatherSensorDescription(
key="WindNight",
device_class=SensorDeviceClass.WIND_SPEED,
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
value_fn=lambda data: cast(float, data[ATTR_SPEED][ATTR_VALUE]),
attr_fn=lambda data: {"direction": data[ATTR_DIRECTION][ATTR_ENGLISH]},
translation_key=f"wind_speed_night_{day}d",
day=day,
)
for day in range(MAX_FORECAST_DAYS + 1)
),
)
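# Sketch of the per-day expansion pattern used in FORECAST_SENSOR_TYPES above:
# each logical sensor is repeated once per forecast day by unpacking a generator
# expression into the enclosing tuple. SketchDescription is a hypothetical
# stand-in for the real description class.
from dataclasses import dataclass

MAX_FORECAST_DAYS = 4

@dataclass(frozen=True)
class SketchDescription:
    key: str
    translation_key: str
    day: int

FORECAST_DESCRIPTIONS = (
    *(
        SketchDescription(key="HoursOfSun", translation_key=f"hours_of_sun_{day}d", day=day)
        for day in range(MAX_FORECAST_DAYS + 1)
    ),
    *(
        SketchDescription(key="UVIndex", translation_key=f"uv_index_{day}d", day=day)
        for day in range(MAX_FORECAST_DAYS + 1)
    ),
)

# Two logical sensors, each expanded to five per-day descriptions.
assert len(FORECAST_DESCRIPTIONS) == 2 * (MAX_FORECAST_DAYS + 1)
assert FORECAST_DESCRIPTIONS[0].translation_key == "hours_of_sun_0d"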
@@ -240,6 +342,7 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
AccuWeatherSensorDescription(
key="Ceiling",
device_class=SensorDeviceClass.DISTANCE,
icon="mdi:weather-fog",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfLength.METERS,
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
@@ -248,6 +351,7 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
),
AccuWeatherSensorDescription(
key="CloudCover",
icon="mdi:weather-cloudy",
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=PERCENTAGE,
@@ -280,15 +384,6 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
translation_key="realfeel_temperature_shade",
),
AccuWeatherSensorDescription(
key="RelativeHumidity",
device_class=SensorDeviceClass.HUMIDITY,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=PERCENTAGE,
value_fn=lambda data: cast(int, data),
translation_key="humidity",
),
AccuWeatherSensorDescription(
key="Precipitation",
device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
@@ -298,36 +393,18 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
attr_fn=lambda data: {"type": data["PrecipitationType"]},
translation_key="precipitation",
),
AccuWeatherSensorDescription(
key="Pressure",
device_class=SensorDeviceClass.PRESSURE,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
native_unit_of_measurement=UnitOfPressure.HPA,
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
translation_key="pressure",
),
AccuWeatherSensorDescription(
key="PressureTendency",
device_class=SensorDeviceClass.ENUM,
icon="mdi:gauge",
options=["falling", "rising", "steady"],
value_fn=lambda data: cast(str, data["LocalizedText"]).lower(),
translation_key="pressure_tendency",
),
AccuWeatherSensorDescription(
key="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
translation_key="temperature",
),
AccuWeatherSensorDescription(
key="UVIndex",
icon="mdi:weather-sunny",
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
native_unit_of_measurement=UV_INDEX,
value_fn=lambda data: cast(int, data),
attr_fn=lambda data: {ATTR_LEVEL: data["UVIndexText"]},
@@ -354,7 +431,6 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
AccuWeatherSensorDescription(
key="Wind",
device_class=SensorDeviceClass.WIND_SPEED,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
value_fn=lambda data: cast(float, data[ATTR_SPEED][API_METRIC][ATTR_VALUE]),
@@ -373,37 +449,29 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
async def async_setup_entry(
hass: HomeAssistant,
entry: AccuWeatherConfigEntry,
async_add_entities: AddEntitiesCallback,
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Add AccuWeather entities from a config_entry."""
observation_coordinator: AccuWeatherObservationDataUpdateCoordinator = (
entry.runtime_data.coordinator_observation
)
forecast_daily_coordinator: AccuWeatherDailyForecastDataUpdateCoordinator = (
entry.runtime_data.coordinator_daily_forecast
)
sensors: list[AccuWeatherSensor | AccuWeatherForecastSensor] = [
AccuWeatherSensor(observation_coordinator, description)
for description in SENSOR_TYPES
coordinator: AccuWeatherDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
sensors = [
AccuWeatherSensor(coordinator, description) for description in SENSOR_TYPES
]
sensors.extend(
[
AccuWeatherForecastSensor(forecast_daily_coordinator, description, day)
for day in range(MAX_FORECAST_DAYS + 1)
for description in FORECAST_SENSOR_TYPES
if description.key in forecast_daily_coordinator.data[day]
]
)
if coordinator.forecast:
for description in FORECAST_SENSOR_TYPES:
# Some air quality/allergy sensors are only available for certain
# locations.
if description.key not in coordinator.data[ATTR_FORECAST][description.day]:
continue
sensors.append(AccuWeatherSensor(coordinator, description))
async_add_entities(sensors)
class AccuWeatherSensor(
CoordinatorEntity[AccuWeatherObservationDataUpdateCoordinator], SensorEntity
CoordinatorEntity[AccuWeatherDataUpdateCoordinator], SensorEntity
):
"""Define an AccuWeather entity."""
@@ -413,15 +481,22 @@ class AccuWeatherSensor(
def __init__(
self,
coordinator: AccuWeatherObservationDataUpdateCoordinator,
coordinator: AccuWeatherDataUpdateCoordinator,
description: AccuWeatherSensorDescription,
) -> None:
"""Initialize."""
super().__init__(coordinator)
self.forecast_day = description.day
self.entity_description = description
self._sensor_data = self._get_sensor_data(coordinator.data, description.key)
self._attr_unique_id = f"{coordinator.location_key}-{description.key}".lower()
self._sensor_data = _get_sensor_data(
coordinator.data, description.key, self.forecast_day
)
if self.forecast_day is not None:
self._attr_unique_id = f"{coordinator.location_key}-{description.key}-{self.forecast_day}".lower()
else:
self._attr_unique_id = (
f"{coordinator.location_key}-{description.key}".lower()
)
self._attr_device_info = coordinator.device_info
@property
@@ -432,80 +507,30 @@ class AccuWeatherSensor(
@property
def extra_state_attributes(self) -> dict[str, Any]:
"""Return the state attributes."""
if self.forecast_day is not None:
return self.entity_description.attr_fn(self._sensor_data)
return self.entity_description.attr_fn(self.coordinator.data)
@callback
def _handle_coordinator_update(self) -> None:
"""Handle data update."""
self._sensor_data = self._get_sensor_data(
self.coordinator.data, self.entity_description.key
)
self.async_write_ha_state()
@staticmethod
def _get_sensor_data(
sensors: dict[str, Any],
kind: str,
) -> Any:
"""Get sensor data."""
if kind == "Precipitation":
return sensors["PrecipitationSummary"]["PastHour"]
return sensors[kind]
class AccuWeatherForecastSensor(
CoordinatorEntity[AccuWeatherDailyForecastDataUpdateCoordinator], SensorEntity
):
"""Define an AccuWeather entity."""
_attr_attribution = ATTRIBUTION
_attr_has_entity_name = True
entity_description: AccuWeatherSensorDescription
def __init__(
self,
coordinator: AccuWeatherDailyForecastDataUpdateCoordinator,
description: AccuWeatherSensorDescription,
forecast_day: int,
) -> None:
"""Initialize."""
super().__init__(coordinator)
self.entity_description = description
self._sensor_data = self._get_sensor_data(
coordinator.data, description.key, forecast_day
)
self._attr_unique_id = (
f"{coordinator.location_key}-{description.key}-{forecast_day}".lower()
)
self._attr_device_info = coordinator.device_info
self._attr_translation_placeholders = {"forecast_day": str(forecast_day)}
self.forecast_day = forecast_day
@property
def native_value(self) -> str | int | float | None:
"""Return the state."""
return self.entity_description.value_fn(self._sensor_data)
@property
def extra_state_attributes(self) -> dict[str, Any]:
"""Return the state attributes."""
return self.entity_description.attr_fn(self._sensor_data)
@callback
def _handle_coordinator_update(self) -> None:
"""Handle data update."""
self._sensor_data = self._get_sensor_data(
self._sensor_data = _get_sensor_data(
self.coordinator.data, self.entity_description.key, self.forecast_day
)
self.async_write_ha_state()
@staticmethod
def _get_sensor_data(
sensors: list[dict[str, Any]],
kind: str,
forecast_day: int,
) -> Any:
"""Get sensor data."""
return sensors[forecast_day][kind]
def _get_sensor_data(
sensors: dict[str, Any],
kind: str,
forecast_day: int | None = None,
) -> Any:
"""Get sensor data."""
if forecast_day is not None:
return sensors[ATTR_FORECAST][forecast_day][kind]
if kind == "Precipitation":
return sensors["PrecipitationSummary"]["PastHour"]
return sensors[kind]
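A usage sketch for the _get_sensor_data dispatch above, run against hypothetical coordinator data; the values are illustrative, not real AccuWeather output.

    from typing import Any

    ATTR_FORECAST = "forecast"

    def _get_sensor_data(
        sensors: dict[str, Any], kind: str, forecast_day: int | None = None
    ) -> Any:
        # Forecast sensors index into the per-day list; precipitation has its
        # own nested summary; everything else is a plain key lookup.
        if forecast_day is not None:
            return sensors[ATTR_FORECAST][forecast_day][kind]
        if kind == "Precipitation":
            return sensors["PrecipitationSummary"]["PastHour"]
        return sensors[kind]

    data = {
        "UVIndex": 3,
        "PrecipitationSummary": {"PastHour": {"Metric": {"Value": 0.4}}},
        ATTR_FORECAST: [{"HoursOfSun": 7.1}, {"HoursOfSun": 5.0}],
    }

    assert _get_sensor_data(data, "UVIndex") == 3
    assert _get_sensor_data(data, "Precipitation")["Metric"]["Value"] == 0.4
    assert _get_sensor_data(data, "HoursOfSun", forecast_day=1) == 5.0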

View File

@@ -11,7 +11,7 @@
}
},
"create_entry": {
"default": "Some sensors are not enabled by default. You can enable them in the entity registry after the integration configuration."
"default": "Some sensors are not enabled by default. You can enable them in the entity registry after the integration configuration.\nWeather forecast is not enabled by default. You can enable it in the integration options."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -21,8 +21,8 @@
},
"entity": {
"sensor": {
"air_quality": {
"name": "Air quality day {forecast_day}",
"air_quality_0d": {
"name": "Air quality today",
"state": {
"good": "Good",
"hazardous": "Hazardous",
@@ -32,6 +32,50 @@
"unhealthy": "Unhealthy"
}
},
"air_quality_1d": {
"name": "Air quality day 1",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
},
"air_quality_2d": {
"name": "Air quality day 2",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
},
"air_quality_3d": {
"name": "Air quality day 3",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
},
"air_quality_4d": {
"name": "Air quality day 4",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
},
"apparent_temperature": {
"name": "Apparent temperature"
},
@@ -41,52 +85,240 @@
"cloud_cover": {
"name": "Cloud cover"
},
"cloud_cover_day": {
"name": "Cloud cover day {forecast_day}"
"cloud_cover_day_0d": {
"name": "Cloud cover today"
},
"cloud_cover_night": {
"name": "Cloud cover night {forecast_day}"
"cloud_cover_day_1d": {
"name": "Cloud cover day 1"
},
"condition_day": {
"name": "Condition day {forecast_day}"
"cloud_cover_day_2d": {
"name": "Cloud cover day 2"
},
"condition_night": {
"name": "Condition night {forecast_day}"
"cloud_cover_day_3d": {
"name": "Cloud cover day 3"
},
"cloud_cover_day_4d": {
"name": "Cloud cover day 4"
},
"cloud_cover_night_0d": {
"name": "Cloud cover tonight"
},
"cloud_cover_night_1d": {
"name": "Cloud cover night 1"
},
"cloud_cover_night_2d": {
"name": "Cloud cover night 2"
},
"cloud_cover_night_3d": {
"name": "Cloud cover night 3"
},
"cloud_cover_night_4d": {
"name": "Cloud cover night 4"
},
"condition_day_0d": {
"name": "Condition today"
},
"condition_day_1d": {
"name": "Condition day 1"
},
"condition_day_2d": {
"name": "Condition day 2"
},
"condition_day_3d": {
"name": "Condition day 3"
},
"condition_day_4d": {
"name": "Condition day 4"
},
"condition_night_0d": {
"name": "Condition tonight"
},
"condition_night_1d": {
"name": "Condition night 1"
},
"condition_night_2d": {
"name": "Condition night 2"
},
"condition_night_3d": {
"name": "Condition night 3"
},
"condition_night_4d": {
"name": "Condition night 4"
},
"dew_point": {
"name": "Dew point"
},
"grass_pollen": {
"name": "Grass pollen day {forecast_day}",
"grass_pollen_0d": {
"name": "Grass pollen today",
"state_attributes": {
"level": {
"name": "Level",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"hours_of_sun": {
"name": "Hours of sun day {forecast_day}"
},
"mold_pollen": {
"name": "Mold pollen day {forecast_day}",
"grass_pollen_1d": {
"name": "Grass pollen day 1",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"grass_pollen_2d": {
"name": "Grass pollen day 2",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"grass_pollen_3d": {
"name": "Grass pollen day 3",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"grass_pollen_4d": {
"name": "Grass pollen day 4",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"hours_of_sun_0d": {
"name": "Hours of sun today"
},
"hours_of_sun_1d": {
"name": "Hours of sun day 1"
},
"hours_of_sun_2d": {
"name": "Hours of sun day 2"
},
"hours_of_sun_3d": {
"name": "Hours of sun day 3"
},
"hours_of_sun_4d": {
"name": "Hours of sun day 4"
},
"mold_pollen_0d": {
"name": "Mold pollen today",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"mold_pollen_1d": {
"name": "Mold pollen day 1",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"mold_pollen_2d": {
"name": "Mold pollen day 2",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"mold_pollen_3d": {
"name": "Mold pollen day 3",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"mold_pollen_4d": {
"name": "Mold pollen day 4",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
@@ -102,18 +334,82 @@
"falling": "Falling"
}
},
"ragweed_pollen": {
"name": "Ragweed pollen day {forecast_day}",
"ragweed_pollen_0d": {
"name": "Ragweed pollen today",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"ragweed_pollen_1d": {
"name": "Ragweed pollen day 1",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"ragweed_pollen_2d": {
"name": "Ragweed pollen day 2",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"ragweed_pollen_3d": {
"name": "Ragweed pollen day 3",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"ragweed_pollen_4d": {
"name": "Ragweed pollen day 4",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
@@ -121,45 +417,205 @@
"realfeel_temperature": {
"name": "RealFeel temperature"
},
"realfeel_temperature_max": {
"name": "RealFeel temperature max day {forecast_day}"
"realfeel_temperature_max_0d": {
"name": "RealFeel temperature max today"
},
"realfeel_temperature_min": {
"name": "RealFeel temperature min day {forecast_day}"
"realfeel_temperature_max_1d": {
"name": "RealFeel temperature max day 1"
},
"realfeel_temperature_max_2d": {
"name": "RealFeel temperature max day 2"
},
"realfeel_temperature_max_3d": {
"name": "RealFeel temperature max day 3"
},
"realfeel_temperature_max_4d": {
"name": "RealFeel temperature max day 4"
},
"realfeel_temperature_min_0d": {
"name": "RealFeel temperature min today"
},
"realfeel_temperature_min_1d": {
"name": "RealFeel temperature min day 1"
},
"realfeel_temperature_min_2d": {
"name": "RealFeel temperature min day 2"
},
"realfeel_temperature_min_3d": {
"name": "RealFeel temperature min day 3"
},
"realfeel_temperature_min_4d": {
"name": "RealFeel temperature min day 4"
},
"realfeel_temperature_shade": {
"name": "RealFeel temperature shade"
},
"realfeel_temperature_shade_max": {
"name": "RealFeel temperature shade max day {forecast_day}"
"realfeel_temperature_shade_max_0d": {
"name": "RealFeel temperature shade max today"
},
"realfeel_temperature_shade_min": {
"name": "RealFeel temperature shade min day {forecast_day}"
"realfeel_temperature_shade_max_1d": {
"name": "RealFeel temperature shade max day 1"
},
"solar_irradiance_day": {
"name": "Solar irradiance day {forecast_day}"
"realfeel_temperature_shade_max_2d": {
"name": "RealFeel temperature shade max day 2"
},
"solar_irradiance_night": {
"name": "Solar irradiance night {forecast_day}"
"realfeel_temperature_shade_max_3d": {
"name": "RealFeel temperature shade max day 3"
},
"thunderstorm_probability_day": {
"name": "Thunderstorm probability day {forecast_day}"
"realfeel_temperature_shade_max_4d": {
"name": "RealFeel temperature shade max day 4"
},
"thunderstorm_probability_night": {
"name": "Thunderstorm probability night {forecast_day}"
"realfeel_temperature_shade_min_0d": {
"name": "RealFeel temperature shade min today"
},
"tree_pollen": {
"name": "Tree pollen day {forecast_day}",
"realfeel_temperature_shade_min_1d": {
"name": "RealFeel temperature shade min day 1"
},
"realfeel_temperature_shade_min_2d": {
"name": "RealFeel temperature shade min day 2"
},
"realfeel_temperature_shade_min_3d": {
"name": "RealFeel temperature shade min day 3"
},
"realfeel_temperature_shade_min_4d": {
"name": "RealFeel temperature shade min day 4"
},
"solar_irradiance_day_0d": {
"name": "Solar irradiance today"
},
"solar_irradiance_day_1d": {
"name": "Solar irradiance day 1"
},
"solar_irradiance_day_2d": {
"name": "Solar irradiance day 2"
},
"solar_irradiance_day_3d": {
"name": "Solar irradiance day 3"
},
"solar_irradiance_day_4d": {
"name": "Solar irradiance day 4"
},
"solar_irradiance_night_0d": {
"name": "Solar irradiance tonight"
},
"solar_irradiance_night_1d": {
"name": "Solar irradiance night 1"
},
"solar_irradiance_night_2d": {
"name": "Solar irradiance night 2"
},
"solar_irradiance_night_3d": {
"name": "Solar irradiance night 3"
},
"solar_irradiance_night_4d": {
"name": "Solar irradiance night 4"
},
"thunderstorm_probability_day_0d": {
"name": "Thunderstorm probability today"
},
"thunderstorm_probability_day_1d": {
"name": "Thunderstorm probability day 1"
},
"thunderstorm_probability_day_2d": {
"name": "Thunderstorm probability day 2"
},
"thunderstorm_probability_day_3d": {
"name": "Thunderstorm probability day 3"
},
"thunderstorm_probability_day_4d": {
"name": "Thunderstorm probability day 4"
},
"thunderstorm_probability_night_0d": {
"name": "Thunderstorm probability tonight"
},
"thunderstorm_probability_night_1d": {
"name": "Thunderstorm probability night 1"
},
"thunderstorm_probability_night_2d": {
"name": "Thunderstorm probability night 2"
},
"thunderstorm_probability_night_3d": {
"name": "Thunderstorm probability night 3"
},
"thunderstorm_probability_night_4d": {
"name": "Thunderstorm probability night 4"
},
"tree_pollen_0d": {
"name": "Tree pollen today",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"tree_pollen_1d": {
"name": "Tree pollen day 1",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"tree_pollen_2d": {
"name": "Tree pollen day 2",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"tree_pollen_3d": {
"name": "Tree pollen day 3",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"tree_pollen_4d": {
"name": "Tree pollen day 4",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
@@ -168,30 +624,94 @@
"name": "UV index",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"uv_index_forecast": {
"name": "UV index day {forecast_day}",
"uv_index_0d": {
"name": "UV index today",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"uv_index_1d": {
"name": "UV index day 1",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"uv_index_2d": {
"name": "UV index day 2",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"uv_index_3d": {
"name": "UV index day 3",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
},
"uv_index_4d": {
"name": "UV index day 4",
"state_attributes": {
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen_0d::state_attributes::level::name%]",
"state": {
"good": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::good%]",
"hazardous": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::hazardous%]",
"high": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::high%]",
"low": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::moderate%]",
"unhealthy": "[%key:component::accuweather::entity::sensor::air_quality_0d::state::unhealthy%]"
}
}
}
@@ -208,17 +728,75 @@
"wind_gust_speed": {
"name": "[%key:component::weather::entity_component::_::state_attributes::wind_gust_speed::name%]"
},
"wind_gust_speed_day": {
"name": "Wind gust speed day {forecast_day}"
"wind_gust_speed_day_0d": {
"name": "Wind gust speed today"
},
"wind_gust_speed_night": {
"name": "Wind gust speed night {forecast_day}"
"wind_gust_speed_day_1d": {
"name": "Wind gust speed day 1"
},
"wind_speed_day": {
"name": "Wind speed day {forecast_day}"
"wind_gust_speed_day_2d": {
"name": "Wind gust speed day 2"
},
"wind_speed_night": {
"name": "Wind speed night {forecast_day}"
"wind_gust_speed_day_3d": {
"name": "Wind gust speed day 3"
},
"wind_gust_speed_day_4d": {
"name": "Wind gust speed day 4"
},
"wind_gust_speed_night_0d": {
"name": "Wind gust speed tonight"
},
"wind_gust_speed_night_1d": {
"name": "Wind gust speed night 1"
},
"wind_gust_speed_night_2d": {
"name": "Wind gust speed night 2"
},
"wind_gust_speed_night_3d": {
"name": "Wind gust speed night 3"
},
"wind_gust_speed_night_4d": {
"name": "Wind gust speed night 4"
},
"wind_speed_day_0d": {
"name": "Wind speed today"
},
"wind_speed_day_1d": {
"name": "Wind speed day 1"
},
"wind_speed_day_2d": {
"name": "Wind speed day 2"
},
"wind_speed_day_3d": {
"name": "Wind speed day 3"
},
"wind_speed_day_4d": {
"name": "Wind speed day 4"
},
"wind_speed_night_0d": {
"name": "Wind speed tonight"
},
"wind_speed_night_1d": {
"name": "Wind speed night 1"
},
"wind_speed_night_2d": {
"name": "Wind speed night 2"
},
"wind_speed_night_3d": {
"name": "Wind speed night 3"
},
"wind_speed_night_4d": {
"name": "Wind speed night 4"
}
}
},
"options": {
"step": {
"init": {
"description": "Due to the limitations of the free version of the AccuWeather API key, when you enable weather forecast, data updates will be performed every 80 minutes instead of every 40 minutes.",
"data": {
"forecast": "Weather forecast"
}
}
}
},
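The options description above documents why polling slows down when the daily forecast is enabled: the free AccuWeather key allows only a limited number of calls, so the update interval doubles from 40 to 80 minutes. As a rough sketch only (not the integration's actual code; the "forecast" option key, the coordinator name, and the helper function are assumptions for illustration), the interval selection could look like this:

from datetime import timedelta
import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


def build_coordinator(hass: HomeAssistant, entry: ConfigEntry) -> DataUpdateCoordinator:
    """Sketch: derive the polling interval from a hypothetical 'forecast' option."""
    forecast_enabled = entry.options.get("forecast", False)
    # Each forecast refresh costs an extra API request, so poll half as often.
    interval = timedelta(minutes=80) if forecast_enabled else timedelta(minutes=40)
    # Only the interval selection is shown here; a real coordinator would also
    # pass an update method that fetches the observation (and forecast) data.
    return DataUpdateCoordinator(
        hass,
        _LOGGER,
        name="accuweather (sketch)",
        update_interval=interval,
    )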


@@ -9,7 +9,6 @@ from accuweather.const import ENDPOINT
from homeassistant.components import system_health
from homeassistant.core import HomeAssistant, callback
from . import AccuWeatherConfigEntry
from .const import DOMAIN
@@ -23,11 +22,9 @@ def async_register(
async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
"""Get info for the info page."""
config_entry: AccuWeatherConfigEntry = hass.config_entries.async_entries(DOMAIN)[0]
remaining_requests = (
config_entry.runtime_data.coordinator_observation.accuweather.requests_remaining
)
remaining_requests = list(hass.data[DOMAIN].values())[
0
].accuweather.requests_remaining
return {
"can_reach_server": system_health.async_check_can_reach_url(hass, ENDPOINT),


@@ -7,7 +7,6 @@ from typing import cast
from homeassistant.components.weather import (
ATTR_FORECAST_CLOUD_COVERAGE,
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_HUMIDITY,
ATTR_FORECAST_NATIVE_APPARENT_TEMP,
ATTR_FORECAST_NATIVE_PRECIPITATION,
ATTR_FORECAST_NATIVE_TEMP,
@@ -18,10 +17,11 @@ from homeassistant.components.weather import (
ATTR_FORECAST_TIME,
ATTR_FORECAST_UV_INDEX,
ATTR_FORECAST_WIND_BEARING,
CoordinatorWeatherEntity,
Forecast,
SingleCoordinatorWeatherEntity,
WeatherEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
UnitOfLength,
UnitOfPrecipitationDepth,
@@ -33,158 +33,129 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util.dt import utc_from_timestamp
from . import AccuWeatherConfigEntry, AccuWeatherData
from . import AccuWeatherDataUpdateCoordinator
from .const import (
API_METRIC,
ATTR_DIRECTION,
ATTR_FORECAST,
ATTR_SPEED,
ATTR_VALUE,
ATTRIBUTION,
CONDITION_MAP,
)
from .coordinator import (
AccuWeatherDailyForecastDataUpdateCoordinator,
AccuWeatherObservationDataUpdateCoordinator,
DOMAIN,
)
PARALLEL_UPDATES = 1
async def async_setup_entry(
hass: HomeAssistant,
entry: AccuWeatherConfigEntry,
async_add_entities: AddEntitiesCallback,
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Add a AccuWeather weather entity from a config_entry."""
async_add_entities([AccuWeatherEntity(entry.runtime_data)])
coordinator: AccuWeatherDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
async_add_entities([AccuWeatherEntity(coordinator)])
class AccuWeatherEntity(
CoordinatorWeatherEntity[
AccuWeatherObservationDataUpdateCoordinator,
AccuWeatherDailyForecastDataUpdateCoordinator,
]
SingleCoordinatorWeatherEntity[AccuWeatherDataUpdateCoordinator]
):
"""Define an AccuWeather entity."""
_attr_has_entity_name = True
_attr_name = None
def __init__(self, accuweather_data: AccuWeatherData) -> None:
def __init__(self, coordinator: AccuWeatherDataUpdateCoordinator) -> None:
"""Initialize."""
super().__init__(
observation_coordinator=accuweather_data.coordinator_observation,
daily_coordinator=accuweather_data.coordinator_daily_forecast,
)
super().__init__(coordinator)
self._attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
self._attr_native_pressure_unit = UnitOfPressure.HPA
self._attr_native_temperature_unit = UnitOfTemperature.CELSIUS
self._attr_native_visibility_unit = UnitOfLength.KILOMETERS
self._attr_native_wind_speed_unit = UnitOfSpeed.KILOMETERS_PER_HOUR
self._attr_unique_id = accuweather_data.coordinator_observation.location_key
self._attr_unique_id = coordinator.location_key
self._attr_attribution = ATTRIBUTION
self._attr_device_info = accuweather_data.coordinator_observation.device_info
self._attr_supported_features = WeatherEntityFeature.FORECAST_DAILY
self.observation_coordinator = accuweather_data.coordinator_observation
self.daily_coordinator = accuweather_data.coordinator_daily_forecast
self._attr_device_info = coordinator.device_info
if self.coordinator.forecast:
self._attr_supported_features = WeatherEntityFeature.FORECAST_DAILY
@property
def condition(self) -> str | None:
"""Return the current condition."""
return CONDITION_MAP.get(self.observation_coordinator.data["WeatherIcon"])
return CONDITION_MAP.get(self.coordinator.data["WeatherIcon"])
@property
def cloud_coverage(self) -> float:
"""Return the Cloud coverage in %."""
return cast(float, self.observation_coordinator.data["CloudCover"])
return cast(float, self.coordinator.data["CloudCover"])
@property
def native_apparent_temperature(self) -> float:
"""Return the apparent temperature."""
return cast(
float,
self.observation_coordinator.data["ApparentTemperature"][API_METRIC][
ATTR_VALUE
],
float, self.coordinator.data["ApparentTemperature"][API_METRIC][ATTR_VALUE]
)
@property
def native_temperature(self) -> float:
"""Return the temperature."""
return cast(
float,
self.observation_coordinator.data["Temperature"][API_METRIC][ATTR_VALUE],
)
return cast(float, self.coordinator.data["Temperature"][API_METRIC][ATTR_VALUE])
@property
def native_pressure(self) -> float:
"""Return the pressure."""
return cast(
float, self.observation_coordinator.data["Pressure"][API_METRIC][ATTR_VALUE]
)
return cast(float, self.coordinator.data["Pressure"][API_METRIC][ATTR_VALUE])
@property
def native_dew_point(self) -> float:
"""Return the dew point."""
return cast(
float, self.observation_coordinator.data["DewPoint"][API_METRIC][ATTR_VALUE]
)
return cast(float, self.coordinator.data["DewPoint"][API_METRIC][ATTR_VALUE])
@property
def humidity(self) -> int:
"""Return the humidity."""
return cast(int, self.observation_coordinator.data["RelativeHumidity"])
return cast(int, self.coordinator.data["RelativeHumidity"])
@property
def native_wind_gust_speed(self) -> float:
"""Return the wind gust speed."""
return cast(
float,
self.observation_coordinator.data["WindGust"][ATTR_SPEED][API_METRIC][
ATTR_VALUE
],
float, self.coordinator.data["WindGust"][ATTR_SPEED][API_METRIC][ATTR_VALUE]
)
@property
def native_wind_speed(self) -> float:
"""Return the wind speed."""
return cast(
float,
self.observation_coordinator.data["Wind"][ATTR_SPEED][API_METRIC][
ATTR_VALUE
],
float, self.coordinator.data["Wind"][ATTR_SPEED][API_METRIC][ATTR_VALUE]
)
@property
def wind_bearing(self) -> int:
"""Return the wind bearing."""
return cast(
int, self.observation_coordinator.data["Wind"][ATTR_DIRECTION]["Degrees"]
)
return cast(int, self.coordinator.data["Wind"][ATTR_DIRECTION]["Degrees"])
@property
def native_visibility(self) -> float:
"""Return the visibility."""
return cast(
float,
self.observation_coordinator.data["Visibility"][API_METRIC][ATTR_VALUE],
)
return cast(float, self.coordinator.data["Visibility"][API_METRIC][ATTR_VALUE])
@property
def uv_index(self) -> float:
"""Return the UV index."""
return cast(float, self.observation_coordinator.data["UVIndex"])
return cast(float, self.coordinator.data["UVIndex"])
@callback
def _async_forecast_daily(self) -> list[Forecast] | None:
"""Return the daily forecast in native units."""
@property
def forecast(self) -> list[Forecast] | None:
"""Return the forecast array."""
if not self.coordinator.forecast:
return None
# remap keys from library to keys understood by the weather component
return [
{
ATTR_FORECAST_TIME: utc_from_timestamp(item["EpochDate"]).isoformat(),
ATTR_FORECAST_CLOUD_COVERAGE: item["CloudCoverDay"],
ATTR_FORECAST_HUMIDITY: item["RelativeHumidityDay"]["Average"],
ATTR_FORECAST_NATIVE_TEMP: item["TemperatureMax"][ATTR_VALUE],
ATTR_FORECAST_NATIVE_TEMP_LOW: item["TemperatureMin"][ATTR_VALUE],
ATTR_FORECAST_NATIVE_APPARENT_TEMP: item["RealFeelTemperatureMax"][
@@ -204,5 +175,10 @@ class AccuWeatherEntity(
ATTR_FORECAST_WIND_BEARING: item["WindDay"][ATTR_DIRECTION]["Degrees"],
ATTR_FORECAST_CONDITION: CONDITION_MAP.get(item["IconDay"]),
}
for item in self.daily_coordinator.data
for item in self.coordinator.data[ATTR_FORECAST]
]
@callback
def _async_forecast_daily(self) -> list[Forecast] | None:
"""Return the daily forecast in native units."""
return self.forecast


@@ -9,10 +9,7 @@ from typing import Any
import serial
import voluptuous as vol
from homeassistant.components.switch import (
PLATFORM_SCHEMA as SWITCH_PLATFORM_SCHEMA,
SwitchEntity,
)
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import (
CONF_FILENAME,
CONF_NAME,
@@ -41,7 +38,7 @@ from .const import (
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend(
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_FILENAME): cv.isdevice,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
@@ -81,7 +78,7 @@ class AcerSwitch(SwitchEntity):
write_timeout: int,
) -> None:
"""Init of the Acer projector."""
self.serial = serial.Serial(
self.ser = serial.Serial(
port=serial_port, timeout=timeout, write_timeout=write_timeout
)
self._serial_port = serial_port
@@ -99,16 +96,16 @@ class AcerSwitch(SwitchEntity):
# was disconnected during runtime.
# This way the projector can be reconnected and will still work
try:
if not self.serial.is_open:
self.serial.open()
self.serial.write(msg.encode("utf-8"))
if not self.ser.is_open:
self.ser.open()
self.ser.write(msg.encode("utf-8"))
# Size is an empirical value; there is no real limit.
# AFAIK there is no limit and no end character so we will usually
# need to wait for timeout
ret = self.serial.read_until(size=20).decode("utf-8")
ret = self.ser.read_until(size=20).decode("utf-8")
except serial.SerialException:
_LOGGER.error("Problem communicating with %s", self._serial_port)
self.serial.close()
self.ser.close()
return ret
def _write_read_format(self, msg: str) -> str:


@@ -3,53 +3,31 @@
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
import homeassistant.helpers.entity_registry as er
from .const import DOMAIN
from .hub import PulseHub
CONF_HUBS = "hubs"
PLATFORMS = [Platform.COVER, Platform.SENSOR]
type AcmedaConfigEntry = ConfigEntry[PulseHub]
async def async_setup_entry(
hass: HomeAssistant, config_entry: AcmedaConfigEntry
) -> bool:
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Set up Rollease Acmeda Automate hub from a config entry."""
await _migrate_unique_ids(hass, config_entry)
hub = PulseHub(hass, config_entry)
if not await hub.async_setup():
return False
config_entry.runtime_data = hub
hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = hub
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
return True
async def _migrate_unique_ids(hass: HomeAssistant, entry: AcmedaConfigEntry) -> None:
"""Migrate pre-config flow unique ids."""
entity_registry = er.async_get(hass)
registry_entries = er.async_entries_for_config_entry(
entity_registry, entry.entry_id
)
for reg_entry in registry_entries:
if isinstance(reg_entry.unique_id, int): # type: ignore[unreachable]
entity_registry.async_update_entity( # type: ignore[unreachable]
reg_entry.entity_id, new_unique_id=str(reg_entry.unique_id)
)
async def async_unload_entry(
hass: HomeAssistant, config_entry: AcmedaConfigEntry
) -> bool:
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Unload a config entry."""
hub = config_entry.runtime_data
hub = hass.data[DOMAIN][config_entry.entry_id]
unload_ok = await hass.config_entries.async_unload_platforms(
config_entry, PLATFORMS
@@ -58,4 +36,7 @@ async def async_unload_entry(
if not await hub.async_reset():
return False
if unload_ok:
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok
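Several hunks in this diff switch between two ways of holding per-entry objects: a hass.data[DOMAIN][entry.entry_id] dictionary on one side and a typed entry.runtime_data attribute (via a ConfigEntry[...] alias such as AcmedaConfigEntry) on the other. A minimal sketch of the typed runtime_data pattern follows; the names MyHub and MyConfigEntry are placeholders for illustration, not taken from the diff:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


class MyHub:
    """Placeholder for an integration's per-entry object (e.g. a hub client)."""


type MyConfigEntry = ConfigEntry[MyHub]


async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
    """Store the hub on the entry itself instead of in hass.data."""
    entry.runtime_data = MyHub()
    return True


async def async_unload_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
    """Read the hub back fully typed, with no dict lookup or cast."""
    hub = entry.runtime_data  # typed as MyHub
    return hub is not None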


@@ -11,7 +11,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import ACMEDA_ENTITY_REMOVE, DOMAIN, LOGGER
class AcmedaEntity(entity.Entity):
class AcmedaBase(entity.Entity):
"""Base representation of an Acmeda roller."""
_attr_should_poll = False
@@ -67,7 +67,7 @@ class AcmedaEntity(entity.Entity):
@property
def unique_id(self) -> str:
"""Return the unique ID of this roller."""
return str(self.roller.id)
return self.roller.id # type: ignore[no-any-return]
@property
def device_id(self) -> str:


@@ -9,23 +9,24 @@ from homeassistant.components.cover import (
CoverEntity,
CoverEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import AcmedaConfigEntry
from .const import ACMEDA_HUB_UPDATE
from .entity import AcmedaEntity
from .base import AcmedaBase
from .const import ACMEDA_HUB_UPDATE, DOMAIN
from .helpers import async_add_acmeda_entities
from .hub import PulseHub
async def async_setup_entry(
hass: HomeAssistant,
config_entry: AcmedaConfigEntry,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the Acmeda Rollers from a config entry."""
hub = config_entry.runtime_data
hub: PulseHub = hass.data[DOMAIN][config_entry.entry_id]
current: set[int] = set()
@@ -44,7 +45,7 @@ async def async_setup_entry(
)
class AcmedaCover(AcmedaEntity, CoverEntity):
class AcmedaCover(AcmedaBase, CoverEntity):
"""Representation of an Acmeda cover device."""
_attr_name = None


@@ -2,8 +2,6 @@
from __future__ import annotations
from typing import TYPE_CHECKING
from aiopulse import Roller
from homeassistant.config_entries import ConfigEntry
@@ -13,20 +11,17 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import DOMAIN, LOGGER
if TYPE_CHECKING:
from . import AcmedaConfigEntry
@callback
def async_add_acmeda_entities(
hass: HomeAssistant,
entity_class: type,
config_entry: AcmedaConfigEntry,
config_entry: ConfigEntry,
current: set[int],
async_add_entities: AddEntitiesCallback,
) -> None:
"""Add any new entities."""
hub = config_entry.runtime_data
hub = hass.data[DOMAIN][config_entry.entry_id]
LOGGER.debug("Looking for new %s on: %s", entity_class.__name__, hub.host)
api = hub.api.rollers


@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/acmeda",
"iot_class": "local_push",
"loggers": ["aiopulse"],
"requirements": ["aiopulse==0.4.6"]
"requirements": ["aiopulse==0.4.4"]
}


@@ -3,24 +3,25 @@
from __future__ import annotations
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import PERCENTAGE
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import AcmedaConfigEntry
from .const import ACMEDA_HUB_UPDATE
from .entity import AcmedaEntity
from .base import AcmedaBase
from .const import ACMEDA_HUB_UPDATE, DOMAIN
from .helpers import async_add_acmeda_entities
from .hub import PulseHub
async def async_setup_entry(
hass: HomeAssistant,
config_entry: AcmedaConfigEntry,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the Acmeda Rollers from a config entry."""
hub = config_entry.runtime_data
hub: PulseHub = hass.data[DOMAIN][config_entry.entry_id]
current: set[int] = set()
@@ -39,7 +40,7 @@ async def async_setup_entry(
)
class AcmedaBattery(AcmedaEntity, SensorEntity):
class AcmedaBattery(AcmedaBase, SensorEntity):
"""Representation of an Acmeda cover sensor."""
_attr_device_class = SensorDeviceClass.BATTERY


@@ -9,7 +9,7 @@ from typing import Final
LEASES_REGEX: Final[re.Pattern[str]] = re.compile(
r"(?P<ip>([0-9]{1,3}[\.]){3}[0-9]{1,3})"
r"\smac:\s(?P<mac>([0-9a-f]{2}[:-]){5}([0-9a-f]{2}))"
r"\svalid\sfor:\s(?P<timevalid>(-?\d+))"
r"\ssec"
+ r"\smac:\s(?P<mac>([0-9a-f]{2}[:-]){5}([0-9a-f]{2}))"
+ r"\svalid\sfor:\s(?P<timevalid>(-?\d+))"
+ r"\ssec"
)
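For reference, this pattern expects lease lines of roughly the shape shown below. The sample string is invented for illustration and is not taken from a real Actiontec router; the snippet assumes LEASES_REGEX is the compiled pattern defined above.

sample = "192.168.1.10 mac: aa:bb:cc:dd:ee:ff valid for: 3600 sec"
if (match := LEASES_REGEX.search(sample)) is not None:
    # Named groups give the IP, MAC and remaining lease time in seconds.
    print(match.group("ip"), match.group("mac"), match.group("timevalid"))
    # -> 192.168.1.10 aa:bb:cc:dd:ee:ff 3600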


@@ -9,8 +9,8 @@ from typing import Final
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN as DEVICE_TRACKER_DOMAIN,
PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA,
DOMAIN,
PLATFORM_SCHEMA as BASE_PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
@@ -23,7 +23,7 @@ from .model import Device
_LOGGER: Final = logging.getLogger(__name__)
PLATFORM_SCHEMA: Final = DEVICE_TRACKER_PLATFORM_SCHEMA.extend(
PLATFORM_SCHEMA: Final = BASE_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
@@ -36,7 +36,7 @@ def get_scanner(
hass: HomeAssistant, config: ConfigType
) -> ActiontecDeviceScanner | None:
"""Validate the configuration and return an Actiontec scanner."""
scanner = ActiontecDeviceScanner(config[DEVICE_TRACKER_DOMAIN])
scanner = ActiontecDeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None
@@ -51,6 +51,7 @@ class ActiontecDeviceScanner(DeviceScanner):
self.last_results: list[Device] = []
data = self.get_actiontec_data()
self.success_init = data is not None
_LOGGER.info("Scanner initialized")
def scan_devices(self) -> list[str]:
"""Scan for new devices and return a list with found device IDs."""
@@ -69,7 +70,7 @@ class ActiontecDeviceScanner(DeviceScanner):
Return boolean if scanning successful.
"""
_LOGGER.debug("Scanning")
_LOGGER.info("Scanning")
if not self.success_init:
return False
@@ -78,7 +79,7 @@ class ActiontecDeviceScanner(DeviceScanner):
self.last_results = [
device for device in actiontec_data if device.timevalid > -60
]
_LOGGER.debug("Scan successful")
_LOGGER.info("Scan successful")
return True
def get_actiontec_data(self) -> list[Device] | None:


@@ -135,15 +135,11 @@ class AdaxDevice(ClimateEntity):
class LocalAdaxDevice(ClimateEntity):
"""Representation of a heater."""
_attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF]
_attr_hvac_modes = [HVACMode.HEAT]
_attr_hvac_mode = HVACMode.HEAT
_attr_max_temp = 35
_attr_min_temp = 5
_attr_supported_features = (
ClimateEntityFeature.TARGET_TEMPERATURE
| ClimateEntityFeature.TURN_OFF
| ClimateEntityFeature.TURN_ON
)
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
_attr_target_temperature_step = PRECISION_WHOLE
_attr_temperature_unit = UnitOfTemperature.CELSIUS
@@ -156,14 +152,6 @@ class LocalAdaxDevice(ClimateEntity):
manufacturer="Adax",
)
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set hvac mode."""
if hvac_mode == HVACMode.HEAT:
temperature = self._attr_target_temperature or self._attr_min_temp
await self._adax_data_handler.set_target_temperature(temperature)
elif hvac_mode == HVACMode.OFF:
await self._adax_data_handler.set_target_temperature(0)
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
@@ -173,14 +161,6 @@ class LocalAdaxDevice(ClimateEntity):
async def async_update(self) -> None:
"""Get the latest data."""
data = await self._adax_data_handler.get_status()
self._attr_target_temperature = data["target_temperature"]
self._attr_current_temperature = data["current_temperature"]
self._attr_available = self._attr_current_temperature is not None
if (target_temp := data["target_temperature"]) == 0:
self._attr_hvac_mode = HVACMode.OFF
self._attr_icon = "mdi:radiator-off"
if target_temp == 0:
self._attr_target_temperature = self._attr_min_temp
else:
self._attr_hvac_mode = HVACMode.HEAT
self._attr_icon = "mdi:radiator"
self._attr_target_temperature = target_temp


@@ -130,7 +130,7 @@ class AdaxConfigFlow(ConfigFlow, domain=DOMAIN):
async_get_clientsession(self.hass), account_id, password
)
if token is None:
_LOGGER.debug("Adax: Failed to login to retrieve token")
_LOGGER.info("Adax: Failed to login to retrieve token")
errors["base"] = "cannot_connect"
return self.async_show_form(
step_id="cloud",


@@ -2,12 +2,10 @@
from __future__ import annotations
from dataclasses import dataclass
from adguardhome import AdGuardHome, AdGuardHomeConnectionError
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
@@ -26,6 +24,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import (
CONF_FORCE,
DATA_ADGUARD_CLIENT,
DOMAIN,
SERVICE_ADD_URL,
SERVICE_DISABLE_URL,
@@ -43,18 +42,9 @@ SERVICE_REFRESH_SCHEMA = vol.Schema(
)
PLATFORMS = [Platform.SENSOR, Platform.SWITCH]
type AdGuardConfigEntry = ConfigEntry[AdGuardData]
@dataclass
class AdGuardData:
"""Adguard data type."""
client: AdGuardHome
version: str
async def async_setup_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up AdGuard Home from a config entry."""
session = async_get_clientsession(hass, entry.data[CONF_VERIFY_SSL])
adguard = AdGuardHome(
@@ -67,13 +57,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> b
session=session,
)
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {DATA_ADGUARD_CLIENT: adguard}
try:
version = await adguard.version()
await adguard.version()
except AdGuardHomeConnectionError as exception:
raise ConfigEntryNotReady from exception
entry.runtime_data = AdGuardData(adguard, version)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
async def add_url(call: ServiceCall) -> None:
@@ -117,20 +107,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> b
return True
async def async_unload_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload AdGuard Home config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
loaded_entries = [
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
# This is the last loaded instance of AdGuard, deregister any services
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
if not hass.data[DOMAIN]:
hass.services.async_remove(DOMAIN, SERVICE_ADD_URL)
hass.services.async_remove(DOMAIN, SERVICE_REMOVE_URL)
hass.services.async_remove(DOMAIN, SERVICE_ENABLE_URL)
hass.services.async_remove(DOMAIN, SERVICE_DISABLE_URL)
hass.services.async_remove(DOMAIN, SERVICE_REFRESH)
del hass.data[DOMAIN]
return unload_ok


@@ -6,6 +6,9 @@ DOMAIN = "adguard"
LOGGER = logging.getLogger(__package__)
DATA_ADGUARD_CLIENT = "adguard_client"
DATA_ADGUARD_VERSION = "adguard_version"
CONF_FORCE = "force"
SERVICE_ADD_URL = "add_url"


@@ -2,14 +2,13 @@
from __future__ import annotations
from adguardhome import AdGuardHomeError
from adguardhome import AdGuardHome, AdGuardHomeError
from homeassistant.config_entries import SOURCE_HASSIO
from homeassistant.config_entries import SOURCE_HASSIO, ConfigEntry
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import Entity
from . import AdGuardConfigEntry, AdGuardData
from .const import DOMAIN, LOGGER
from .const import DATA_ADGUARD_VERSION, DOMAIN, LOGGER
class AdGuardHomeEntity(Entity):
@@ -20,13 +19,12 @@ class AdGuardHomeEntity(Entity):
def __init__(
self,
data: AdGuardData,
entry: AdGuardConfigEntry,
adguard: AdGuardHome,
entry: ConfigEntry,
) -> None:
"""Initialize the AdGuard Home entity."""
self._entry = entry
self.data = data
self.adguard = data.client
self.adguard = adguard
async def async_update(self) -> None:
"""Update AdGuard Home entity."""
@@ -46,7 +44,7 @@ class AdGuardHomeEntity(Entity):
async def _adguard_update(self) -> None:
"""Update AdGuard Home entity."""
raise NotImplementedError
raise NotImplementedError()
@property
def device_info(self) -> DeviceInfo:
@@ -70,6 +68,8 @@ class AdGuardHomeEntity(Entity):
},
manufacturer="AdGuard Team",
name="AdGuard Home",
sw_version=self.data.version,
sw_version=self.hass.data[DOMAIN][self._entry.entry_id].get(
DATA_ADGUARD_VERSION
),
configuration_url=config_url,
)


@@ -66,20 +66,10 @@
}
},
"services": {
"add_url": {
"service": "mdi:link-plus"
},
"remove_url": {
"service": "mdi:link-off"
},
"enable_url": {
"service": "mdi:link-variant"
},
"disable_url": {
"service": "mdi:link-variant-off"
},
"refresh": {
"service": "mdi:refresh"
}
"add_url": "mdi:link-plus",
"remove_url": "mdi:link-off",
"enable_url": "mdi:link-variant",
"disable_url": "mdi:link-variant-off",
"refresh": "mdi:refresh"
}
}


@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"loggers": ["adguardhome"],
"requirements": ["adguardhome==0.7.0"]
"requirements": ["adguardhome==0.6.3"]
}


@@ -7,15 +7,16 @@ from dataclasses import dataclass
from datetime import timedelta
from typing import Any
from adguardhome import AdGuardHome
from adguardhome import AdGuardHome, AdGuardHomeConnectionError
from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import PERCENTAGE, UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import AdGuardConfigEntry, AdGuardData
from .const import DOMAIN
from .const import DATA_ADGUARD_CLIENT, DATA_ADGUARD_VERSION, DOMAIN
from .entity import AdGuardHomeEntity
SCAN_INTERVAL = timedelta(seconds=300)
@@ -84,14 +85,21 @@ SENSORS: tuple[AdGuardHomeEntityDescription, ...] = (
async def async_setup_entry(
hass: HomeAssistant,
entry: AdGuardConfigEntry,
entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up AdGuard Home sensor based on a config entry."""
data = entry.runtime_data
adguard = hass.data[DOMAIN][entry.entry_id][DATA_ADGUARD_CLIENT]
try:
version = await adguard.version()
except AdGuardHomeConnectionError as exception:
raise PlatformNotReady from exception
hass.data[DOMAIN][entry.entry_id][DATA_ADGUARD_VERSION] = version
async_add_entities(
[AdGuardHomeSensor(data, entry, description) for description in SENSORS],
[AdGuardHomeSensor(adguard, entry, description) for description in SENSORS],
True,
)
@@ -103,18 +111,18 @@ class AdGuardHomeSensor(AdGuardHomeEntity, SensorEntity):
def __init__(
self,
data: AdGuardData,
entry: AdGuardConfigEntry,
adguard: AdGuardHome,
entry: ConfigEntry,
description: AdGuardHomeEntityDescription,
) -> None:
"""Initialize AdGuard Home sensor."""
super().__init__(data, entry)
super().__init__(adguard, entry)
self.entity_description = description
self._attr_unique_id = "_".join(
[
DOMAIN,
self.adguard.host,
str(self.adguard.port),
adguard.host,
str(adguard.port),
"sensor",
description.key,
]


@@ -7,14 +7,15 @@ from dataclasses import dataclass
from datetime import timedelta
from typing import Any
from adguardhome import AdGuardHome, AdGuardHomeError
from adguardhome import AdGuardHome, AdGuardHomeConnectionError, AdGuardHomeError
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import AdGuardConfigEntry, AdGuardData
from .const import DOMAIN, LOGGER
from .const import DATA_ADGUARD_CLIENT, DATA_ADGUARD_VERSION, DOMAIN, LOGGER
from .entity import AdGuardHomeEntity
SCAN_INTERVAL = timedelta(seconds=10)
@@ -78,14 +79,21 @@ SWITCHES: tuple[AdGuardHomeSwitchEntityDescription, ...] = (
async def async_setup_entry(
hass: HomeAssistant,
entry: AdGuardConfigEntry,
entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up AdGuard Home switch based on a config entry."""
data = entry.runtime_data
adguard = hass.data[DOMAIN][entry.entry_id][DATA_ADGUARD_CLIENT]
try:
version = await adguard.version()
except AdGuardHomeConnectionError as exception:
raise PlatformNotReady from exception
hass.data[DOMAIN][entry.entry_id][DATA_ADGUARD_VERSION] = version
async_add_entities(
[AdGuardHomeSwitch(data, entry, description) for description in SWITCHES],
[AdGuardHomeSwitch(adguard, entry, description) for description in SWITCHES],
True,
)
@@ -97,21 +105,15 @@ class AdGuardHomeSwitch(AdGuardHomeEntity, SwitchEntity):
def __init__(
self,
data: AdGuardData,
entry: AdGuardConfigEntry,
adguard: AdGuardHome,
entry: ConfigEntry,
description: AdGuardHomeSwitchEntityDescription,
) -> None:
"""Initialize AdGuard Home switch."""
super().__init__(data, entry)
super().__init__(adguard, entry)
self.entity_description = description
self._attr_unique_id = "_".join(
[
DOMAIN,
self.adguard.host,
str(self.adguard.port),
"switch",
description.key,
]
[DOMAIN, adguard.host, str(adguard.port), "switch", description.key]
)
async def async_turn_off(self, **kwargs: Any) -> None:

Some files were not shown because too many files have changed in this diff.