Compare commits


1 Commit

Author: Paulus Schoutsen
SHA1: 651312fb42
Message: Add frontend dev proxy for WDS
Date: 2023-02-27 23:13:58 -05:00
1833 changed files with 31477 additions and 70157 deletions

View File

@@ -125,7 +125,6 @@ tests: &tests
- tests/mock/**
- tests/pylint/**
- tests/scripts/**
- tests/syrupy.py
- tests/test_util/**
- tests/testing_config/**
- tests/util/**

View File

@@ -36,7 +36,6 @@ omit =
homeassistant/components/airnow/__init__.py
homeassistant/components/airnow/sensor.py
homeassistant/components/airq/__init__.py
homeassistant/components/airq/coordinator.py
homeassistant/components/airq/sensor.py
homeassistant/components/airthings/__init__.py
homeassistant/components/airthings/sensor.py
@@ -198,6 +197,7 @@ omit =
homeassistant/components/denonavr/__init__.py
homeassistant/components/denonavr/media_player.py
homeassistant/components/denonavr/receiver.py
homeassistant/components/devolo_home_control/switch.py
homeassistant/components/digital_ocean/*
homeassistant/components/discogs/sensor.py
homeassistant/components/discord/__init__.py
@@ -249,8 +249,7 @@ omit =
homeassistant/components/ecowitt/sensor.py
homeassistant/components/eddystone_temperature/sensor.py
homeassistant/components/edimax/switch.py
homeassistant/components/edl21/__init__.py
homeassistant/components/edl21/sensor.py
homeassistant/components/edl21/*
homeassistant/components/egardia/*
homeassistant/components/eight_sleep/__init__.py
homeassistant/components/eight_sleep/binary_sensor.py
@@ -395,8 +394,7 @@ omit =
homeassistant/components/fritzbox_callmonitor/__init__.py
homeassistant/components/fritzbox_callmonitor/base.py
homeassistant/components/fritzbox_callmonitor/sensor.py
homeassistant/components/frontier_silicon/__init__.py
homeassistant/components/frontier_silicon/browse_media.py
homeassistant/components/frontier_silicon/const.py
homeassistant/components/frontier_silicon/media_player.py
homeassistant/components/futurenow/light.py
homeassistant/components/garadget/cover.py
@@ -518,6 +516,9 @@ omit =
homeassistant/components/ifttt/alarm_control_panel.py
homeassistant/components/iglo/light.py
homeassistant/components/ihc/*
homeassistant/components/imap/__init__.py
homeassistant/components/imap/coordinator.py
homeassistant/components/imap/sensor.py
homeassistant/components/imap_email_content/sensor.py
homeassistant/components/incomfort/*
homeassistant/components/insteon/binary_sensor.py
@@ -626,6 +627,9 @@ omit =
homeassistant/components/lg_netcast/media_player.py
homeassistant/components/lg_soundbar/__init__.py
homeassistant/components/lg_soundbar/media_player.py
homeassistant/components/lidarr/__init__.py
homeassistant/components/lidarr/coordinator.py
homeassistant/components/lidarr/sensor.py
homeassistant/components/life360/__init__.py
homeassistant/components/life360/coordinator.py
homeassistant/components/life360/device_tracker.py
@@ -636,10 +640,8 @@ omit =
homeassistant/components/linux_battery/sensor.py
homeassistant/components/lirc/*
homeassistant/components/livisi/__init__.py
homeassistant/components/livisi/binary_sensor.py
homeassistant/components/livisi/climate.py
homeassistant/components/livisi/coordinator.py
homeassistant/components/livisi/entity.py
homeassistant/components/livisi/switch.py
homeassistant/components/llamalab_automate/notify.py
homeassistant/components/logi_circle/__init__.py
@@ -672,6 +674,7 @@ omit =
homeassistant/components/lyric/api.py
homeassistant/components/lyric/climate.py
homeassistant/components/lyric/sensor.py
homeassistant/components/magicseaweed/sensor.py
homeassistant/components/mailgun/notify.py
homeassistant/components/map/*
homeassistant/components/mastodon/notify.py
@@ -772,11 +775,7 @@ omit =
homeassistant/components/nexia/climate.py
homeassistant/components/nexia/entity.py
homeassistant/components/nexia/switch.py
homeassistant/components/nextcloud/__init__.py
homeassistant/components/nextcloud/binary_sensor.py
homeassistant/components/nextcloud/coordinator.py
homeassistant/components/nextcloud/entity.py
homeassistant/components/nextcloud/sensor.py
homeassistant/components/nextcloud/*
homeassistant/components/nfandroidtv/__init__.py
homeassistant/components/nfandroidtv/notify.py
homeassistant/components/nibe_heatpump/__init__.py
@@ -808,8 +807,6 @@ omit =
homeassistant/components/nuki/sensor.py
homeassistant/components/nx584/alarm_control_panel.py
homeassistant/components/oasa_telematics/sensor.py
homeassistant/components/obihai/__init__.py
homeassistant/components/obihai/button.py
homeassistant/components/obihai/connectivity.py
homeassistant/components/obihai/sensor.py
homeassistant/components/octoprint/__init__.py
@@ -974,23 +971,18 @@ omit =
homeassistant/components/rejseplanen/sensor.py
homeassistant/components/remember_the_milk/__init__.py
homeassistant/components/remote_rpi_gpio/*
homeassistant/components/reolink/__init__.py
homeassistant/components/reolink/binary_sensor.py
homeassistant/components/reolink/button.py
homeassistant/components/reolink/camera.py
homeassistant/components/reolink/entity.py
homeassistant/components/reolink/host.py
homeassistant/components/reolink/light.py
homeassistant/components/reolink/number.py
homeassistant/components/reolink/select.py
homeassistant/components/reolink/siren.py
homeassistant/components/reolink/switch.py
homeassistant/components/reolink/update.py
homeassistant/components/repetier/__init__.py
homeassistant/components/repetier/sensor.py
homeassistant/components/rest/notify.py
homeassistant/components/rest/switch.py
homeassistant/components/ridwell/__init__.py
homeassistant/components/ridwell/calendar.py
homeassistant/components/ridwell/coordinator.py
homeassistant/components/ridwell/switch.py
homeassistant/components/ring/camera.py
@@ -1289,11 +1281,9 @@ omit =
homeassistant/components/touchline/climate.py
homeassistant/components/tplink_lte/*
homeassistant/components/tplink_omada/__init__.py
homeassistant/components/tplink_omada/controller.py
homeassistant/components/tplink_omada/coordinator.py
homeassistant/components/tplink_omada/entity.py
homeassistant/components/tplink_omada/switch.py
homeassistant/components/tplink_omada/update.py
homeassistant/components/traccar/device_tracker.py
homeassistant/components/tractive/__init__.py
homeassistant/components/tractive/binary_sensor.py
@@ -1361,7 +1351,6 @@ omit =
homeassistant/components/velbus/entity.py
homeassistant/components/velbus/light.py
homeassistant/components/velbus/sensor.py
homeassistant/components/velbus/select.py
homeassistant/components/velbus/switch.py
homeassistant/components/velux/__init__.py
homeassistant/components/velux/cover.py
@@ -1379,6 +1368,7 @@ omit =
homeassistant/components/verisure/sensor.py
homeassistant/components/verisure/switch.py
homeassistant/components/versasense/*
homeassistant/components/vesync/__init__.py
homeassistant/components/vesync/common.py
homeassistant/components/vesync/fan.py
homeassistant/components/vesync/light.py
@@ -1508,7 +1498,7 @@ omit =
homeassistant/components/zeversolar/coordinator.py
homeassistant/components/zeversolar/entity.py
homeassistant/components/zeversolar/sensor.py
homeassistant/components/zha/websocket_api.py
homeassistant/components/zha/api.py
homeassistant/components/zha/core/channels/*
homeassistant/components/zha/core/device.py
homeassistant/components/zha/core/gateway.py

View File

@@ -20,6 +20,7 @@
"python.linting.enabled": true,
"python.linting.pylintEnabled": true,
"python.formatting.blackPath": "/usr/local/bin/black",
"python.linting.flake8Path": "/usr/local/bin/flake8",
"python.linting.pycodestylePath": "/usr/local/bin/pycodestyle",
"python.linting.pydocstylePath": "/usr/local/bin/pydocstyle",
"python.linting.mypyPath": "/usr/local/bin/mypy",

View File

@@ -31,9 +31,9 @@ body:
label: What version of Home Assistant Core has the issue?
placeholder: core-
description: >
Can be found in: [Settings ⇒ System ⇒ Repairs ⇒ Three Dots in Upper Right ⇒ System information](https://my.home-assistant.io/redirect/system_health/).
Can be found in: [Settings -> About](https://my.home-assistant.io/redirect/info/).
[![Open your Home Assistant instance and show the system information.](https://my.home-assistant.io/badges/system_health.svg)](https://my.home-assistant.io/redirect/system_health/)
[![Open your Home Assistant instance and show your Home Assistant version information.](https://my.home-assistant.io/badges/info.svg)](https://my.home-assistant.io/redirect/info/)
- type: input
attributes:
label: What was the last working version of Home Assistant Core?
@@ -46,9 +46,9 @@ body:
attributes:
label: What type of installation are you running?
description: >
Can be found in: [Settings ⇒ System ⇒ Repairs ⇒ Three Dots in Upper Right ⇒ System information](https://my.home-assistant.io/redirect/system_health/).
Can be found in: [Settings -> System-> Repairs -> Three Dots in Upper Right -> System information](https://my.home-assistant.io/redirect/system_health/).
[![Open your Home Assistant instance and show the system information.](https://my.home-assistant.io/badges/system_health.svg)](https://my.home-assistant.io/redirect/system_health/)
[![Open your Home Assistant instance and show health information about your system.](https://my.home-assistant.io/badges/system_health.svg)](https://my.home-assistant.io/redirect/system_health/)
options:
- Home Assistant OS
- Home Assistant Container

View File

@@ -1,6 +1,6 @@
blank_issues_enabled: false
contact_links:
- name: Report a bug with the UI, Frontend or Dashboards
- name: Report a bug with the UI, Frontend or Lovelace
url: https://github.com/home-assistant/frontend/issues
about: This is the issue tracker for our backend. Please report issues with the UI in the frontend repository.
- name: Report incorrect or missing information on our website

View File

@@ -59,7 +59,6 @@
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR.
- [ ] I have followed the [development checklist][dev-checklist]
- [ ] I have followed the [perfect PR recommendations][perfect-pr]
- [ ] The code has been formatted using Black (`black --fast homeassistant tests`)
- [ ] Tests have been added to verify that the new code works.
@@ -108,4 +107,3 @@ To help with the load of incoming pull requests:
[manifest-docs]: https://developers.home-assistant.io/docs/en/creating_integration_manifest.html
[quality-scale]: https://developers.home-assistant.io/docs/en/next/integration_quality_scale_index.html
[docs-repository]: https://github.com/home-assistant/home-assistant.io
[perfect-pr]: https://developers.home-assistant.io/docs/review-process/#creating-the-perfect-pr

View File

@@ -24,7 +24,7 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
with:
fetch-depth: 0
@@ -67,7 +67,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.5.0
@@ -105,7 +105,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
@@ -198,7 +198,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2023.03.0
uses: home-assistant/builder@2022.11.0
with:
args: |
$BUILD_ARGS \
@@ -232,7 +232,6 @@ jobs:
- khadas-vim3
- odroid-c2
- odroid-c4
- odroid-m1
- odroid-n2
- odroid-xu
- qemuarm
@@ -249,7 +248,7 @@ jobs:
- yellow
steps:
- name: Checkout the repository
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set build additional args
run: |
@@ -276,7 +275,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2023.03.0
uses: home-assistant/builder@2022.11.0
with:
args: |
$BUILD_ARGS \
@@ -292,7 +291,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
@@ -331,7 +330,7 @@ jobs:
- "homeassistant"
steps:
- name: Checkout the repository
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Login to DockerHub
if: matrix.registry == 'homeassistant'

View File

@@ -79,7 +79,7 @@ jobs:
runs-on: ubuntu-22.04
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: >-
@@ -203,7 +203,7 @@ jobs:
- info
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v4.5.0
@@ -212,7 +212,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.1
uses: actions/cache@v3.2.6
with:
path: venv
key: >-
@@ -227,10 +227,9 @@ jobs:
pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.3.1
uses: actions/cache@v3.2.6
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.pre-commit_cache_key }}
@@ -248,7 +247,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.5.0
id: python
@@ -257,7 +256,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
@@ -266,7 +265,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -286,15 +285,15 @@ jobs:
shopt -s globstar
pre-commit run --hook-stage manual black --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure
lint-ruff:
name: Check ruff
lint-flake8:
name: Check flake8
runs-on: ubuntu-22.04
needs:
- info
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.5.0
id: python
@@ -303,7 +302,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
@@ -312,7 +311,56 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Register flake8 problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/flake8.json"
- name: Run flake8 (fully)
if: needs.info.outputs.test_full_suite == 'true'
run: |
. venv/bin/activate
pre-commit run --hook-stage manual flake8 --all-files
- name: Run flake8 (partially)
if: needs.info.outputs.test_full_suite == 'false'
shell: bash
run: |
. venv/bin/activate
shopt -s globstar
pre-commit run --hook-stage manual flake8 --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*}
lint-ruff:
name: Check ruff
runs-on: ubuntu-22.04
needs:
- info
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.5.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v3.2.6
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -343,7 +391,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.5.0
id: python
@@ -352,7 +400,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
@@ -361,7 +409,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -381,7 +429,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.5.0
id: python
@@ -390,7 +438,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
@@ -399,7 +447,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -407,6 +455,19 @@ jobs:
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Run pyupgrade (fully)
if: needs.info.outputs.test_full_suite == 'true'
run: |
. venv/bin/activate
pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
- name: Run pyupgrade (partially)
if: needs.info.outputs.test_full_suite == 'false'
shell: bash
run: |
. venv/bin/activate
shopt -s globstar
pre-commit run --hook-stage manual pyupgrade --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure
- name: Register yamllint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/yamllint.json"
@@ -487,7 +548,7 @@ jobs:
python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v4.5.0
@@ -501,16 +562,15 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.1
uses: actions/cache@v3.2.6
with:
path: venv
lookup-only: true
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore pip wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@v3.3.1
uses: actions/cache@v3.2.6
with:
path: ${{ env.PIP_CACHE }}
key: >-
@@ -555,7 +615,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v4.5.0
@@ -564,7 +624,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
@@ -587,7 +647,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v4.5.0
@@ -596,7 +656,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
@@ -620,7 +680,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v4.5.0
@@ -629,7 +689,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
@@ -664,7 +724,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v4.5.0
@@ -680,7 +740,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
@@ -688,7 +748,7 @@ jobs:
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@v3.3.1
uses: actions/cache@v3.2.6
with:
path: .mypy_cache
key: >-
@@ -730,7 +790,7 @@ jobs:
name: Run pip check ${{ matrix.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v4.5.0
@@ -739,7 +799,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
@@ -783,7 +843,7 @@ jobs:
bluez \
ffmpeg
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v4.5.0
@@ -792,7 +852,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
@@ -909,7 +969,7 @@ jobs:
ffmpeg \
libmariadb-dev-compat
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v4.5.0
@@ -918,7 +978,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache/restore@v3.2.6
with:
path: venv
fail-on-cache-miss: true
@@ -941,10 +1001,6 @@ jobs:
run: |
. venv/bin/activate
pip install mysqlclient sqlalchemy_utils
- name: Compile English translations
run: |
. venv/bin/activate
python3 -m script.translations develop --all
- name: Run pytest (partially)
timeout-minutes: 20
shell: bash
@@ -1017,7 +1073,7 @@ jobs:
ffmpeg \
postgresql-server-dev-14
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v4.5.0
@@ -1026,7 +1082,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.1
uses: actions/cache@v3.2.6
with:
path: venv
fail-on-cache-miss: true
@@ -1049,10 +1105,6 @@ jobs:
run: |
. venv/bin/activate
pip install psycopg2 sqlalchemy_utils
- name: Compile English translations
run: |
. venv/bin/activate
python3 -m script.translations develop --all
- name: Run pytest (partially)
timeout-minutes: 20
shell: bash
@@ -1093,17 +1145,14 @@ jobs:
- pytest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Download all coverage artifacts
uses: actions/download-artifact@v3
- name: Upload coverage to Codecov (full coverage)
if: needs.info.outputs.test_full_suite == 'true'
uses: codecov/codecov-action@v3.1.1
with:
fail_ci_if_error: true
flags: full-suite
- name: Upload coverage to Codecov (partial coverage)
if: needs.info.outputs.test_full_suite == 'false'
uses: codecov/codecov-action@v3.1.1
with:
fail_ci_if_error: true

.github/workflows/matchers/flake8.json (vendored, new file, 30 lines)
View File

@@ -0,0 +1,30 @@
{
"problemMatcher": [
{
"owner": "flake8-error",
"severity": "error",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s([EF]\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
},
{
"owner": "flake8-warning",
"severity": "warning",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s([CDNW]\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
}
]
}
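
The two matchers above differ only in which flake8 codes they classify as errors (E/F) versus warnings (C/D/N/W). As a minimal illustration that is not part of the change itself, the sketch below applies the error pattern to a typical flake8 output line; the file path and message are invented for the example:

import re

# Error pattern from the "flake8-error" matcher above, unescaped from JSON.
ERROR_PATTERN = re.compile(r"^(.*):(\d+):(\d+):\s([EF]\d{3}\s.*)$")

# Illustrative flake8 output line (hypothetical path and message).
sample = "homeassistant/components/demo/light.py:23:1: E302 expected 2 blank lines, found 1"

match = ERROR_PATTERN.match(sample)
assert match is not None
file_path, line, column, message = match.groups()
print(file_path, line, column, message, sep=" | ")
# homeassistant/components/demo/light.py | 23 | 1 | E302 expected 2 blank lines, found 1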

View File

@@ -17,7 +17,7 @@ jobs:
# - No PRs marked as no-stale
# - No issues (-1)
- name: 90 days stale PRs policy
uses: actions/stale@v8.0.0
uses: actions/stale@v7.0.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 90
@@ -53,7 +53,7 @@ jobs:
# - No issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: 90 days stale issues
uses: actions/stale@v8.0.0
uses: actions/stale@v7.0.0
with:
repo-token: ${{ steps.token.outputs.token }}
days-before-stale: 90
@@ -83,7 +83,7 @@ jobs:
# - No Issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: Needs more information stale issues policy
uses: actions/stale@v8.0.0
uses: actions/stale@v7.0.0
with:
repo-token: ${{ steps.token.outputs.token }}
only-labels: "needs-more-information"

View File

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.5.0

View File

@@ -22,7 +22,7 @@ jobs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- name: Checkout the repository
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Get information
id: info
@@ -54,9 +54,6 @@ jobs:
# OpenCV headless installation
echo "CI_BUILD=1"
echo "ENABLE_HEADLESS=1"
# Use C-Extension for sqlalchemy
echo "REQUIRE_SQLALCHEMY_CEXT=1"
) > .env_file
- name: Upload env_file
@@ -82,7 +79,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Download env_file
uses: actions/download-artifact@v3
@@ -119,7 +116,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v3.5.0
uses: actions/checkout@v3.3.0
- name: Download env_file
uses: actions/download-artifact@v3
@@ -179,7 +176,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "libexecinfo-dev;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
skip-binary: aiohttp;grpcio;sqlalchemy
skip-binary: aiohttp;grpcio
legacy: true
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
@@ -194,7 +191,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "libexecinfo-dev;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
skip-binary: aiohttp;grpcio;sqlalchemy
skip-binary: aiohttp;grpcio
legacy: true
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"

View File

@@ -1,10 +1,24 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.256
rev: v0.0.247
hooks:
- id: ruff
args:
- --fix
- repo: https://github.com/asottile/pyupgrade
rev: v3.3.1
hooks:
- id: pyupgrade
args: [--py310-plus]
stages: [manual]
- repo: https://github.com/PyCQA/autoflake
rev: v2.0.0
hooks:
- id: autoflake
args:
- --in-place
- --remove-all-unused-imports
stages: [manual]
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
@@ -22,6 +36,20 @@ repos:
- --quiet-level=2
exclude_types: [csv, json]
exclude: ^tests/fixtures/|homeassistant/generated/
- repo: https://github.com/PyCQA/flake8
rev: 6.0.0
hooks:
- id: flake8
additional_dependencies:
- pycodestyle==2.10.0
- pyflakes==3.0.1
- flake8-docstrings==1.6.0
- pydocstyle==6.2.3
- flake8-comprehensions==3.10.1
- flake8-noqa==1.3.0
- mccabe==0.7.0
exclude: docs/source/conf.py
stages: [manual]
- repo: https://github.com/PyCQA/bandit
rev: 1.7.4
hooks:

View File

@@ -4,5 +4,3 @@ azure-*.yml
docs/source/_templates/*
homeassistant/components/*/translations/*.json
homeassistant/generated/*
tests/components/lidarr/fixtures/initialize.js
tests/components/lidarr/fixtures/initialize-wrong.js

View File

@@ -297,7 +297,6 @@ homeassistant.components.tag.*
homeassistant.components.tailscale.*
homeassistant.components.tautulli.*
homeassistant.components.tcp.*
homeassistant.components.threshold.*
homeassistant.components.tibber.*
homeassistant.components.tile.*
homeassistant.components.tilt_ble.*
@@ -311,7 +310,7 @@ homeassistant.components.trafikverket_train.*
homeassistant.components.trafikverket_weatherstation.*
homeassistant.components.tts.*
homeassistant.components.twentemilieu.*
homeassistant.components.unifi.*
homeassistant.components.unifi.update
homeassistant.components.unifiprotect.*
homeassistant.components.upcloud.*
homeassistant.components.update.*

.vscode/tasks.json (vendored, 14 lines)
View File

@@ -42,6 +42,20 @@
},
"problemMatcher": []
},
{
"label": "Flake8",
"type": "shell",
"command": "pre-commit run flake8 --all-files",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Ruff",
"type": "shell",

View File

@@ -25,7 +25,7 @@ rules:
comments:
level: error
require-starting-space: true
min-spaces-from-content: 1
min-spaces-from-content: 2
comments-indentation:
level: error
document-end:

View File

@@ -401,7 +401,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/frontend/ @home-assistant/frontend
/tests/components/frontend/ @home-assistant/frontend
/homeassistant/components/frontier_silicon/ @wlcrs
/tests/components/frontier_silicon/ @wlcrs
/homeassistant/components/fully_kiosk/ @cgarwood
/tests/components/fully_kiosk/ @cgarwood
/homeassistant/components/garages_amsterdam/ @klaasnicolaas
@@ -660,8 +659,8 @@ build.json @home-assistant/supervisor
/tests/components/litejet/ @joncar
/homeassistant/components/litterrobot/ @natekspencer @tkdrob
/tests/components/litterrobot/ @natekspencer @tkdrob
/homeassistant/components/livisi/ @StefanIacobLivisi @planbnet
/tests/components/livisi/ @StefanIacobLivisi @planbnet
/homeassistant/components/livisi/ @StefanIacobLivisi
/tests/components/livisi/ @StefanIacobLivisi
/homeassistant/components/local_calendar/ @allenporter
/tests/components/local_calendar/ @allenporter
/homeassistant/components/local_ip/ @issacg
@@ -785,8 +784,7 @@ build.json @home-assistant/supervisor
/tests/components/nexia/ @bdraco
/homeassistant/components/nextbus/ @vividboarder
/tests/components/nextbus/ @vividboarder
/homeassistant/components/nextcloud/ @mib1185
/tests/components/nextcloud/ @mib1185
/homeassistant/components/nextcloud/ @meichthys
/homeassistant/components/nextdns/ @bieniu
/tests/components/nextdns/ @bieniu
/homeassistant/components/nfandroidtv/ @tkdrob
@@ -1058,8 +1056,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/seven_segments/ @fabaff
/homeassistant/components/sfr_box/ @epenet
/tests/components/sfr_box/ @epenet
/homeassistant/components/sharkiq/ @JeffResc @funkybunch
/tests/components/sharkiq/ @JeffResc @funkybunch
/homeassistant/components/sharkiq/ @JeffResc @funkybunch @AritroSaha10
/tests/components/sharkiq/ @JeffResc @funkybunch @AritroSaha10
/homeassistant/components/shell_command/ @home-assistant/core
/tests/components/shell_command/ @home-assistant/core
/homeassistant/components/shelly/ @balloob @bieniu @thecode @chemelli74 @bdraco
@@ -1103,7 +1101,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/smhi/ @gjohansson-ST
/tests/components/smhi/ @gjohansson-ST
/homeassistant/components/sms/ @ocalvo
/homeassistant/components/snapcast/ @luar123
/homeassistant/components/snooz/ @AustinBrunkhorst
/tests/components/snooz/ @AustinBrunkhorst
/homeassistant/components/solaredge/ @frenck
@@ -1216,6 +1213,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/thethingsnetwork/ @fabaff
/homeassistant/components/thread/ @home-assistant/core
/tests/components/thread/ @home-assistant/core
/homeassistant/components/threshold/ @fabaff
/tests/components/threshold/ @fabaff
/homeassistant/components/tibber/ @danielhiversen
/tests/components/tibber/ @danielhiversen
/homeassistant/components/tile/ @bachya
@@ -1294,8 +1293,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/velux/ @Julius2342
/homeassistant/components/venstar/ @garbled1
/tests/components/venstar/ @garbled1
/homeassistant/components/verisure/ @frenck @niro1987
/tests/components/verisure/ @frenck @niro1987
/homeassistant/components/verisure/ @frenck
/tests/components/verisure/ @frenck
/homeassistant/components/versasense/ @flamm3blemuff1n
/homeassistant/components/version/ @ludeeus
/tests/components/version/ @ludeeus
@@ -1310,8 +1309,6 @@ build.json @home-assistant/supervisor
/tests/components/vizio/ @raman325
/homeassistant/components/vlc_telnet/ @rodripf @MartinHjelmare
/tests/components/vlc_telnet/ @rodripf @MartinHjelmare
/homeassistant/components/voice_assistant/ @balloob @synesthesiam
/tests/components/voice_assistant/ @balloob @synesthesiam
/homeassistant/components/volumio/ @OnFreund
/tests/components/volumio/ @OnFreund
/homeassistant/components/volvooncall/ @molobrakos
@@ -1357,8 +1354,8 @@ build.json @home-assistant/supervisor
/tests/components/wled/ @frenck
/homeassistant/components/wolflink/ @adamkrol93
/tests/components/wolflink/ @adamkrol93
/homeassistant/components/workday/ @fabaff @gjohansson-ST
/tests/components/workday/ @fabaff @gjohansson-ST
/homeassistant/components/workday/ @fabaff
/tests/components/workday/ @fabaff
/homeassistant/components/worldclock/ @fabaff
/tests/components/worldclock/ @fabaff
/homeassistant/components/ws66i/ @ssaenger

View File

@@ -5,6 +5,7 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# Uninstall pre-installed formatting and linting tools
# They would conflict with our pinned versions
RUN pipx uninstall black
RUN pipx uninstall flake8
RUN pipx uninstall pydocstyle
RUN pipx uninstall pycodestyle
RUN pipx uninstall mypy

View File

@@ -4,7 +4,7 @@ Home Assistant |Chat Status|
Open source home automation that puts local control and privacy first. Powered by a worldwide community of tinkerers and DIY enthusiasts. Perfect to run on a Raspberry Pi or a local server.
Check out `home-assistant.io <https://home-assistant.io>`__ for `a
demo <https://demo.home-assistant.io>`__, `installation instructions <https://home-assistant.io/getting-started/>`__,
demo <https://home-assistant.io/demo/>`__, `installation instructions <https://home-assistant.io/getting-started/>`__,
`tutorials <https://home-assistant.io/getting-started/automation/>`__ and `documentation <https://home-assistant.io/docs/>`__.
|screenshot-states|
@@ -23,6 +23,6 @@ of a component, check the `Home Assistant help section <https://home-assistant.i
.. |Chat Status| image:: https://img.shields.io/discord/330944238910963714.svg
:target: https://discord.gg/c5DvZ4e
.. |screenshot-states| image:: https://raw.githubusercontent.com/home-assistant/core/master/docs/screenshots.png
:target: https://demo.home-assistant.io
:target: https://home-assistant.io/demo/
.. |screenshot-integrations| image:: https://raw.githubusercontent.com/home-assistant/core/dev/docs/screenshot-integrations.png
:target: https://home-assistant.io/integrations/

View File

@@ -14,7 +14,7 @@ from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.util import dt as dt_util
from . import auth_store, jwt_wrapper, models
from . import auth_store, models
from .const import ACCESS_TOKEN_EXPIRATION, GROUP_ID_ADMIN
from .mfa_modules import MultiFactorAuthModule, auth_mfa_module_from_config
from .providers import AuthProvider, LoginFlow, auth_provider_from_config
@@ -555,7 +555,9 @@ class AuthManager:
) -> models.RefreshToken | None:
"""Return refresh token if an access token is valid."""
try:
unverif_claims = jwt_wrapper.unverified_hs256_token_decode(token)
unverif_claims = jwt.decode(
token, algorithms=["HS256"], options={"verify_signature": False}
)
except jwt.InvalidTokenError:
return None
@@ -571,9 +573,7 @@ class AuthManager:
issuer = refresh_token.id
try:
jwt_wrapper.verify_and_decode(
token, jwt_key, leeway=10, issuer=issuer, algorithms=["HS256"]
)
jwt.decode(token, jwt_key, leeway=10, issuer=issuer, algorithms=["HS256"])
except jwt.InvalidTokenError:
return None

View File

@@ -1,116 +0,0 @@
"""Provide a wrapper around JWT that caches decoding tokens.
Since we decode the same tokens over and over again
we can cache the result of the decode of valid tokens
to speed up the process.
"""
from __future__ import annotations
from datetime import timedelta
from functools import lru_cache, partial
from typing import Any
from jwt import DecodeError, PyJWS, PyJWT
from homeassistant.util.json import json_loads
JWT_TOKEN_CACHE_SIZE = 16
MAX_TOKEN_SIZE = 8192
_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss")
_VERIFY_OPTIONS: dict[str, Any] = {f"verify_{key}": True for key in _VERIFY_KEYS} | {
"require": []
}
_NO_VERIFY_OPTIONS = {f"verify_{key}": False for key in _VERIFY_KEYS}
class _PyJWSWithLoadCache(PyJWS):
"""PyJWS with a dedicated load implementation."""
@lru_cache(maxsize=JWT_TOKEN_CACHE_SIZE)
# We only ever have a global instance of this class
# so we do not have to worry about the LRU growing
# each time we create a new instance.
def _load(self, jwt: str | bytes) -> tuple[bytes, bytes, dict, bytes]:
"""Load a JWS."""
return super()._load(jwt)
_jws = _PyJWSWithLoadCache()
@lru_cache(maxsize=JWT_TOKEN_CACHE_SIZE)
def _decode_payload(json_payload: str) -> dict[str, Any]:
"""Decode the payload from a JWS dictionary."""
try:
payload = json_loads(json_payload)
except ValueError as err:
raise DecodeError(f"Invalid payload string: {err}") from err
if not isinstance(payload, dict):
raise DecodeError("Invalid payload string: must be a json object")
return payload
class _PyJWTWithVerify(PyJWT):
"""PyJWT with a fast decode implementation."""
def decode_payload(
self, jwt: str, key: str, options: dict[str, Any], algorithms: list[str]
) -> dict[str, Any]:
"""Decode a JWT's payload."""
if len(jwt) > MAX_TOKEN_SIZE:
# Avoid caching impossible tokens
raise DecodeError("Token too large")
return _decode_payload(
_jws.decode_complete(
jwt=jwt,
key=key,
algorithms=algorithms,
options=options,
)["payload"]
)
def verify_and_decode(
self,
jwt: str,
key: str,
algorithms: list[str],
issuer: str | None = None,
leeway: int | float | timedelta = 0,
options: dict[str, Any] | None = None,
) -> dict[str, Any]:
"""Verify a JWT's signature and claims."""
merged_options = {**_VERIFY_OPTIONS, **(options or {})}
payload = self.decode_payload(
jwt=jwt,
key=key,
options=merged_options,
algorithms=algorithms,
)
# These should never be missing since we verify them
# but this is an additional safeguard to make sure
# nothing slips through.
assert "exp" in payload, "exp claim is required"
assert "iat" in payload, "iat claim is required"
self._validate_claims( # type: ignore[no-untyped-call]
payload=payload,
options=merged_options,
issuer=issuer,
leeway=leeway,
)
return payload
_jwt = _PyJWTWithVerify() # type: ignore[no-untyped-call]
verify_and_decode = _jwt.verify_and_decode
unverified_hs256_token_decode = lru_cache(maxsize=JWT_TOKEN_CACHE_SIZE)(
partial(
_jwt.decode_payload, key="", algorithms=["HS256"], options=_NO_VERIFY_OPTIONS
)
)
__all__ = [
"unverified_hs256_token_decode",
"verify_and_decode",
]
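
The removed module above wraps PyJWT so that repeated decodes of the same token string are served from an LRU cache, while the older side of this compare (the auth manager hunk earlier) calls PyJWT directly. As a rough, self-contained sketch of that caching idea, assuming only that PyJWT is installed and not reproducing the repository's implementation, the unverified-claims case can be approximated with functools.lru_cache around the plain jwt.decode call the older auth manager uses:

import functools

import jwt  # PyJWT


@functools.lru_cache(maxsize=16)
def unverified_hs256_decode(token: str) -> dict:
    """Decode HS256 claims without verifying the signature, caching by token string."""
    return jwt.decode(token, algorithms=["HS256"], options={"verify_signature": False})


# Illustrative usage with a throwaway key and issuer.
token = jwt.encode({"iss": "refresh-token-id", "iat": 1, "exp": 2_000_000_000}, "secret", algorithm="HS256")
claims = unverified_hs256_decode(token)  # a second call with the same token hits the cache
verified = jwt.decode(token, "secret", leeway=10, issuer="refresh-token-id", algorithms=["HS256"])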

View File

@@ -6,12 +6,15 @@ from typing import TYPE_CHECKING
import attr
if TYPE_CHECKING:
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers import (
device_registry as dev_reg,
entity_registry as ent_reg,
)
@attr.s(slots=True)
class PermissionLookup:
"""Class to hold data for permission lookups."""
entity_registry: er.EntityRegistry = attr.ib()
device_registry: dr.DeviceRegistry = attr.ib()
entity_registry: ent_reg.EntityRegistry = attr.ib()
device_registry: dev_reg.DeviceRegistry = attr.ib()

View File

@@ -8,7 +8,7 @@ from .util.async_ import protect_loop
def enable() -> None:
"""Enable the detection of blocking calls in the event loop."""
# Prevent urllib3 and requests doing I/O in event loop
HTTPConnection.putrequest = protect_loop( # type: ignore[method-assign]
HTTPConnection.putrequest = protect_loop( # type: ignore[assignment]
HTTPConnection.putrequest
)

View File

@@ -31,7 +31,6 @@ from .helpers import (
entity_registry,
issue_registry,
recorder,
template,
)
from .helpers.dispatcher import async_dispatcher_send
from .helpers.typing import ConfigType
@@ -245,7 +244,6 @@ async def load_registries(hass: core.HomeAssistant) -> None:
entity_registry.async_load(hass),
issue_registry.async_load(hass),
hass.async_add_executor_job(_cache_uname_processor),
template.async_load_custom_templates(hass),
)
@@ -510,20 +508,19 @@ async def async_setup_multi_components(
) -> None:
"""Set up multiple domains. Log on failure."""
futures = {
domain: hass.async_create_task(
async_setup_component(hass, domain, config), f"setup component {domain}"
)
domain: hass.async_create_task(async_setup_component(hass, domain, config))
for domain in domains
}
results = await asyncio.gather(*futures.values(), return_exceptions=True)
for idx, domain in enumerate(futures):
result = results[idx]
if isinstance(result, BaseException):
_LOGGER.error(
"Error setting up integration %s - received exception",
domain,
exc_info=(type(result), result, result.__traceback__),
)
await asyncio.wait(futures.values())
errors = [domain for domain in domains if futures[domain].exception()]
for domain in errors:
exception = futures[domain].exception()
assert exception is not None
_LOGGER.error(
"Error setting up integration %s - received exception",
domain,
exc_info=(type(exception), exception, exception.__traceback__),
)
async def _async_set_up_integrations(
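
The hunk above swaps how per-domain setup failures are collected: one side gathers the setup tasks with return_exceptions=True and logs any exception next to its domain, while the other awaits the tasks and then asks each future for its exception. The following is a small self-contained sketch of the gather-based pattern; the domain names and the stub setup function are invented for illustration:

import asyncio


async def async_setup_component_stub(domain: str) -> bool:
    # Stand-in for async_setup_component; one domain fails on purpose.
    if domain == "bad":
        raise RuntimeError("setup failed")
    return True


async def main() -> None:
    domains = ["light", "bad", "sensor"]
    futures = {domain: asyncio.create_task(async_setup_component_stub(domain)) for domain in domains}
    # return_exceptions=True turns failures into results instead of raising,
    # so every domain's outcome can be inspected and logged.
    results = await asyncio.gather(*futures.values(), return_exceptions=True)
    for domain, result in zip(futures, results):
        if isinstance(result, BaseException):
            print(f"Error setting up integration {domain} - received exception: {result!r}")


asyncio.run(main())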

View File

@@ -1,5 +0,0 @@
{
"domain": "heltun",
"name": "HELTUN",
"iot_standards": ["zwave"]
}

View File

@@ -1,5 +0,0 @@
{
"domain": "homeseer",
"name": "HomeSeer",
"iot_standards": ["zwave"]
}

View File

@@ -1,4 +1,4 @@
"""Contains components that can be plugged into Home Assistant.
"""This package contains components that can be plugged into Home Assistant.
Component design guidelines:
- Each component defines a constant DOMAIN that is equal to its filename.

View File

@@ -40,7 +40,7 @@ def get_scanner(
class ActiontecDeviceScanner(DeviceScanner):
"""Class which queries an actiontec router for connected devices."""
"""This class queries an actiontec router for connected devices."""
def __init__(self, config: ConfigType) -> None:
"""Initialize the scanner."""

View File

@@ -116,30 +116,6 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
"""Return the current fan modes."""
return ADVANTAGE_AIR_FAN_MODES.get(self._ac["fan"])
async def async_turn_on(self) -> None:
"""Set the HVAC State to on."""
await self.aircon(
{
self.ac_key: {
"info": {
"state": ADVANTAGE_AIR_STATE_ON,
}
}
}
)
async def async_turn_off(self) -> None:
"""Set the HVAC State to off."""
await self.aircon(
{
self.ac_key: {
"info": {
"state": ADVANTAGE_AIR_STATE_OFF,
}
}
}
)
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set the HVAC Mode and State."""
if hvac_mode == HVACMode.OFF:
@@ -205,32 +181,24 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
"""Return the target temperature."""
return self._zone["setTemp"]
async def async_turn_on(self) -> None:
"""Set the HVAC State to on."""
await self.aircon(
{
self.ac_key: {
"zones": {self.zone_key: {"state": ADVANTAGE_AIR_STATE_OPEN}}
}
}
)
async def async_turn_off(self) -> None:
"""Set the HVAC State to off."""
await self.aircon(
{
self.ac_key: {
"zones": {self.zone_key: {"state": ADVANTAGE_AIR_STATE_CLOSE}}
}
}
)
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set the HVAC Mode and State."""
if hvac_mode == HVACMode.OFF:
await self.async_turn_off()
await self.aircon(
{
self.ac_key: {
"zones": {self.zone_key: {"state": ADVANTAGE_AIR_STATE_CLOSE}}
}
}
)
else:
await self.async_turn_on()
await self.aircon(
{
self.ac_key: {
"zones": {self.zone_key: {"state": ADVANTAGE_AIR_STATE_OPEN}}
}
}
)
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set the Temperature."""

View File

@@ -1,16 +1,58 @@
"""The air-Q integration."""
from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from datetime import timedelta
import logging
from .const import DOMAIN
from .coordinator import AirQCoordinator
from aioairq import AirQ
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import DOMAIN, MANUFACTURER, TARGET_ROUTE, UPDATE_INTERVAL
_LOGGER = logging.getLogger(__name__)
PLATFORMS: list[Platform] = [Platform.SENSOR]
class AirQCoordinator(DataUpdateCoordinator):
"""Coordinator is responsible for querying the device at a specified route."""
def __init__(
self,
hass: HomeAssistant,
entry: ConfigEntry,
) -> None:
"""Initialise a custom coordinator."""
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=timedelta(seconds=UPDATE_INTERVAL),
)
session = async_get_clientsession(hass)
self.airq = AirQ(
entry.data[CONF_IP_ADDRESS], entry.data[CONF_PASSWORD], session
)
self.device_id = entry.unique_id
assert self.device_id is not None
self.device_info = DeviceInfo(
manufacturer=MANUFACTURER,
identifiers={(DOMAIN, self.device_id)},
)
self.device_info.update(entry.data["device_info"])
async def _async_update_data(self) -> dict:
"""Fetch the data from the device."""
data = await self.airq.get(TARGET_ROUTE)
return self.airq.drop_uncertainties_from_data(data)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up air-Q from a config entry."""

View File

@@ -74,11 +74,12 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
)
device_info = await airq.fetch_device_info()
await self.async_set_unique_id(device_info["id"])
await self.async_set_unique_id(device_info.pop("id"))
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=device_info["name"], data=user_input
title=device_info["name"],
data=user_input | {"device_info": device_info},
)
return self.async_show_form(

View File

@@ -1,61 +0,0 @@
"""The air-Q integration."""
from __future__ import annotations
from datetime import timedelta
import logging
from aioairq import AirQ
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import DOMAIN, MANUFACTURER, TARGET_ROUTE, UPDATE_INTERVAL
_LOGGER = logging.getLogger(__name__)
class AirQCoordinator(DataUpdateCoordinator):
"""Coordinator is responsible for querying the device at a specified route."""
def __init__(
self,
hass: HomeAssistant,
entry: ConfigEntry,
) -> None:
"""Initialise a custom coordinator."""
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=timedelta(seconds=UPDATE_INTERVAL),
)
session = async_get_clientsession(hass)
self.airq = AirQ(
entry.data[CONF_IP_ADDRESS], entry.data[CONF_PASSWORD], session
)
self.device_id = entry.unique_id
assert self.device_id is not None
self.device_info = DeviceInfo(
manufacturer=MANUFACTURER,
identifiers={(DOMAIN, self.device_id)},
)
async def _async_update_data(self) -> dict:
"""Fetch the data from the device."""
if "name" not in self.device_info:
info = await self.airq.fetch_device_info()
self.device_info.update(
DeviceInfo(
name=info["name"],
model=info["model"],
sw_version=info["sw_version"],
hw_version=info["hw_version"],
)
)
data = await self.airq.get(TARGET_ROUTE)
return self.airq.drop_uncertainties_from_data(data)

View File

@@ -51,13 +51,6 @@ class AirQEntityDescription(SensorEntityDescription, AirQEntityDescriptionMixin)
# Keys must match those in the data dictionary
SENSOR_TYPES: list[AirQEntityDescription] = [
AirQEntityDescription(
key="c2h4o",
name="Acetaldehyde",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("c2h4o"),
),
AirQEntityDescription(
key="nh3_MR100",
name="Ammonia",
@@ -65,27 +58,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("nh3_MR100"),
),
AirQEntityDescription(
key="ash3",
name="Arsine",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("ash3"),
),
AirQEntityDescription(
key="br2",
name="Bromine",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("br2"),
),
AirQEntityDescription(
key="ch4s",
name="CH4S",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("ch4s"),
),
AirQEntityDescription(
key="cl2_M20",
name="Chlorine",
@@ -93,16 +65,10 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("cl2_M20"),
),
AirQEntityDescription(
key="clo2",
name="ClO2",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("clo2"),
),
AirQEntityDescription(
key="co",
name="CO",
device_class=SensorDeviceClass.CO,
native_unit_of_measurement=CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("co"),
@@ -115,13 +81,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("co2"),
),
AirQEntityDescription(
key="cs2",
name="CS2",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("cs2"),
),
AirQEntityDescription(
key="dewpt",
name="Dew point",
@@ -137,13 +96,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("ethanol"),
),
AirQEntityDescription(
key="c2h4",
name="Ethylene",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("c2h4"),
),
AirQEntityDescription(
key="ch2o_M10",
name="Formaldehyde",
@@ -151,13 +103,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("ch2o_M10"),
),
AirQEntityDescription(
key="f2",
name="Fluorine",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("f2"),
),
AirQEntityDescription(
key="h2s",
name="H2S",
@@ -165,27 +110,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("h2s"),
),
AirQEntityDescription(
key="hcl",
name="HCl",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("hcl"),
),
AirQEntityDescription(
key="hcn",
name="HCN",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("hcn"),
),
AirQEntityDescription(
key="hf",
name="HF",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("hf"),
),
AirQEntityDescription(
key="health",
name="Health Index",
@@ -217,13 +141,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("h2_M1000"),
),
AirQEntityDescription(
key="h2o2",
name="Hydrogen peroxide",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("h2o2"),
),
AirQEntityDescription(
key="ch4_MIPEX",
name="Methane",
@@ -256,11 +173,12 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
value=lambda data: data.get("no2"),
),
AirQEntityDescription(
key="acid_M100",
name="Organic acid",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
key="o3",
name="Ozone",
device_class=SensorDeviceClass.OZONE,
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("acid_M100"),
value=lambda data: data.get("o3"),
),
AirQEntityDescription(
key="oxygen",
@@ -270,14 +188,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
value=lambda data: data.get("oxygen"),
icon="mdi:leaf",
),
AirQEntityDescription(
key="o3",
name="Ozone",
device_class=SensorDeviceClass.OZONE,
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("o3"),
),
AirQEntityDescription(
key="performance",
name="Performance Index",
@@ -286,13 +196,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
icon="mdi:head-check",
value=lambda data: data.get("performance", 0.0) / 10.0,
),
AirQEntityDescription(
key="ph3",
name="PH3",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("ph3"),
),
AirQEntityDescription(
key="pm1",
name="PM1",
@@ -343,20 +246,6 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("c3h8_MIPEX"),
),
AirQEntityDescription(
key="refigerant",
name="Refrigerant",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("refigerant"),
),
AirQEntityDescription(
key="sih4",
name="SiH4",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("sih4"),
),
AirQEntityDescription(
key="so2",
name="SO2",
@@ -400,6 +289,7 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
AirQEntityDescription(
key="tvoc",
name="VOC",
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("tvoc"),
@@ -407,18 +297,11 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
AirQEntityDescription(
key="tvoc_ionsc",
name="VOC (Industrial)",
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("tvoc_ionsc"),
),
AirQEntityDescription(
key="virus",
name="Virus Index",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:virus-off",
value=lambda data: data.get("virus", 0.0),
),
]

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from datetime import timedelta
from typing import Any
from AIOAladdinConnect import AladdinConnectClient, session_manager
from AIOAladdinConnect import AladdinConnectClient
from homeassistant.components.cover import CoverDeviceClass, CoverEntity
from homeassistant.config_entries import ConfigEntry
@@ -46,7 +46,7 @@ class AladdinDevice(CoverEntity):
) -> None:
"""Initialize the Aladdin Connect cover."""
self._acc = acc
self._entry_id = entry.entry_id
self._device_id = device["device_id"]
self._number = device["door_number"]
self._name = device["name"]
@@ -85,18 +85,7 @@ class AladdinDevice(CoverEntity):
async def async_update(self) -> None:
"""Update status of cover."""
try:
await self._acc.get_doors(self._serial)
self._attr_available = True
except session_manager.ConnectionError:
self._attr_available = False
except session_manager.InvalidPasswordError:
self._attr_available = False
await self.hass.async_create_task(
self.hass.config_entries.async_reload(self._entry_id)
)
await self._acc.get_doors(self._serial)
@property
def is_closed(self) -> bool | None:

View File

@@ -21,7 +21,7 @@ from homeassistant.const import (
SERVICE_ALARM_TRIGGER,
)
from homeassistant.core import Context, HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers import entity_registry
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import get_supported_features
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
@@ -57,11 +57,11 @@ async def async_get_actions(
hass: HomeAssistant, device_id: str
) -> list[dict[str, str]]:
"""List device actions for Alarm control panel devices."""
registry = er.async_get(hass)
registry = entity_registry.async_get(hass)
actions = []
# Get all the integrations entities for this device
for entry in er.async_entries_for_device(registry, device_id):
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue

View File

@@ -21,11 +21,7 @@ from homeassistant.const import (
STATE_ALARM_TRIGGERED,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import (
condition,
config_validation as cv,
entity_registry as er,
)
from homeassistant.helpers import condition, config_validation as cv, entity_registry
from homeassistant.helpers.config_validation import DEVICE_CONDITION_BASE_SCHEMA
from homeassistant.helpers.entity import get_supported_features
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
@@ -68,11 +64,11 @@ async def async_get_conditions(
hass: HomeAssistant, device_id: str
) -> list[dict[str, str]]:
"""List device conditions for Alarm control panel devices."""
registry = er.async_get(hass)
registry = entity_registry.async_get(hass)
conditions = []
# Get all the integrations entities for this device
for entry in er.async_entries_for_device(registry, device_id):
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue

View File

@@ -23,7 +23,7 @@ from homeassistant.const import (
STATE_ALARM_TRIGGERED,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.helpers.entity import get_supported_features
from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
from homeassistant.helpers.typing import ConfigType
@@ -57,11 +57,11 @@ async def async_get_triggers(
hass: HomeAssistant, device_id: str
) -> list[dict[str, str]]:
"""List device triggers for Alarm control panel devices."""
registry = er.async_get(hass)
registry = entity_registry.async_get(hass)
triggers: list[dict[str, str]] = []
# Get all the integrations entities for this device
for entry in er.async_entries_for_device(registry, device_id):
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue

View File

@@ -26,41 +26,19 @@
"armed_vacation": "{entity_name} armed vacation"
}
},
"entity_component": {
"state": {
"_": {
"name": "[%key:component::alarm_control_panel::title%]",
"state": {
"armed": "Armed",
"disarmed": "Disarmed",
"armed_home": "Armed home",
"armed_away": "Armed away",
"armed_night": "Armed night",
"armed_vacation": "Armed vacation",
"armed_custom_bypass": "Armed custom bypass",
"pending": "Pending",
"arming": "Arming",
"disarming": "Disarming",
"triggered": "Triggered"
},
"state_attributes": {
"code_format": {
"name": "Code format",
"state": {
"text": "Text",
"number": "Number"
}
},
"changed_by": {
"name": "Changed by"
},
"code_arm_required": {
"name": "Code for arming",
"state": {
"true": "Required",
"false": "Not required"
}
}
}
"armed": "Armed",
"disarmed": "Disarmed",
"armed_home": "Armed home",
"armed_away": "Armed away",
"armed_night": "Armed night",
"armed_vacation": "Armed vacation",
"armed_custom_bypass": "Armed custom bypass",
"pending": "Pending",
"arming": "Arming",
"disarming": "Disarming",
"triggered": "Triggered"
}
}
}

View File

@@ -1,13 +1,10 @@
{
"title": "Alert",
"entity_component": {
"state": {
"_": {
"name": "[%key:component::alert::title%]",
"state": {
"idle": "[%key:common::state::idle%]",
"off": "Acknowledged",
"on": "[%key:common::state::active%]"
}
"idle": "[%key:common::state::idle%]",
"off": "Acknowledged",
"on": "[%key:common::state::active%]"
}
}
}

View File

@@ -9,7 +9,7 @@ from .const import API_TEMP_UNITS
class UnsupportedProperty(HomeAssistantError):
"""Does not support the requested Smart Home API property."""
"""This entity does not support the requested Smart Home API property."""
class NoTokenAvailable(HomeAssistantError):

View File

@@ -1,4 +1,5 @@
"""Support for Alexa skill service end point."""
import copy
import hmac
from http import HTTPStatus
import logging
@@ -47,7 +48,7 @@ class AlexaFlashBriefingView(http.HomeAssistantView):
def __init__(self, hass, flash_briefings):
"""Initialize Alexa view."""
super().__init__()
self.flash_briefings = flash_briefings
self.flash_briefings = copy.deepcopy(flash_briefings)
template.attach(hass, self.flash_briefings)
@callback

View File

@@ -34,49 +34,49 @@ CONF_TEXT_TYPE: Final = "text_type"
SUPPORTED_VOICES: Final[list[str]] = [
"Aditi", # Hindi
"Amy", # English (British)
"Aria", # English (New Zealand), Neural
"Amy",
"Aria",
"Arlet", # Catalan, Neural
"Arthur", # English, Neural
"Astrid", # Swedish
"Ayanda", # English (South African), Neural
"Ayanda",
"Bianca", # Italian
"Brian", # English (British)
"Brian",
"Camila", # Portuguese, Brazilian
"Carla", # Italian
"Carla",
"Carmen", # Romanian
"Celine", # French
"Celine",
"Chantal", # French Canadian
"Conchita", # Spanish (European)
"Cristiano", # Portuguese (European)
"Conchita",
"Cristiano",
"Daniel", # German, Neural
"Dora", # Icelandic
"Elin", # Swedish, Neural
"Emma", # English
"Enrique", # Spanish (European)
"Ewa", # Polish
"Enrique",
"Ewa",
"Filiz", # Turkish
"Gabrielle", # French (Canadian)
"Gabrielle",
"Geraint", # English Welsh
"Giorgio", # Italian
"Giorgio",
"Gwyneth", # Welsh
"Hala", # Arabic (Gulf), Neural
"Hannah", # German (Austrian), Neural
"Hans", # German
"Hans",
"Hiujin", # Chinese (Cantonese), Neural
"Ida", # Norwegian, Neural
"Ines", # Portuguese, European
"Ivy", # English
"Jacek", # Polish
"Jan", # Polish
"Joanna", # English
"Joey", # English
"Justin", # English
"Ivy",
"Jacek",
"Jan",
"Joanna",
"Joey",
"Justin",
"Kajal", # English (Indian)/Hindi (Bilingual ), Neural
"Karl", # Icelandic
"Kendra", # English
"Kevin", # English, Neural
"Kimberly", # English
"Karl",
"Kendra",
"Kevin",
"Kimberly",
"Laura", # Dutch, Neural
"Lea", # French
"Liam", # Canadian French, Neural
@@ -84,12 +84,12 @@ SUPPORTED_VOICES: Final[list[str]] = [
"Lotte", # Dutch
"Lucia", # Spanish European
"Lupe", # Spanish US
"Mads", # Danish
"Mads",
"Maja", # Polish
"Marlene", # German
"Mathieu", # French
"Matthew", # English
"Maxim", # Russian
"Marlene",
"Mathieu",
"Matthew",
"Maxim",
"Mia", # Spanish Mexican
"Miguel", # Spanish US
"Mizuki", # Japanese
@@ -100,19 +100,17 @@ SUPPORTED_VOICES: Final[list[str]] = [
"Penelope", # Spanish US
"Pedro", # Spanish US, Neural
"Raveena", # English, Indian
"Ricardo", # Portuguese (Brazilian)
"Ruben", # Dutch
"Russell", # English (Australian)
"Ruth", # English, Neural
"Ricardo",
"Ruben",
"Russell",
"Salli", # English
"Seoyeon", # Korean
"Stephen", # English, Neural
"Suvi", # Finnish
"Takumi", # Japanese
"Takumi",
"Tatyana", # Russian
"Vicki", # German
"Vitoria", # Portuguese, Brazilian
"Zeina", # Arabic
"Zeina",
"Zhiyu", # Chinese
]

View File

@@ -2,7 +2,7 @@
from __future__ import annotations
import logging
from typing import Any, Final
from typing import Final
import boto3
import botocore
@@ -166,8 +166,8 @@ class AmazonPollyProvider(Provider):
def get_tts_audio(
self,
message: str,
language: str,
options: dict[str, Any] | None = None,
language: str | None = None,
options: dict[str, str] | None = None,
) -> TtsAudioType:
"""Request TTS file from Polly."""
if options is None or language is None:

View File

@@ -20,12 +20,13 @@ from homeassistant.components.camera import (
from homeassistant.components.ffmpeg import FFmpegManager, get_ffmpeg_manager
from homeassistant.const import ATTR_ENTITY_ID, CONF_NAME, STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers import entity_registry
from homeassistant.helpers.aiohttp_client import (
async_aiohttp_proxy_stream,
async_aiohttp_proxy_web,
async_get_clientsession,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
@@ -145,7 +146,7 @@ async def async_setup_platform(
# with this version, update the old entity with the new unique id.
serial_number = await device.api.async_serial_number
serial_number = serial_number.strip()
registry = er.async_get(hass)
registry = entity_registry.async_get(hass)
entity_id = registry.async_get_entity_id(CAMERA_DOMAIN, DOMAIN, serial_number)
if entity_id is not None:
_LOGGER.debug("Updating unique id for camera %s", entity_id)

View File

@@ -27,9 +27,7 @@ async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
async_call_later(hass, 900, analytics.send_analytics)
# Send every day
async_track_time_interval(
hass, analytics.send_analytics, INTERVAL, "analytics daily"
)
async_track_time_interval(hass, analytics.send_analytics, INTERVAL)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)

View File

@@ -1,6 +1,5 @@
"""Rest API for Home Assistant."""
import asyncio
from functools import lru_cache
from http import HTTPStatus
import logging
@@ -351,12 +350,6 @@ class APIComponentsView(HomeAssistantView):
return self.json(request.app["hass"].config.components)
@lru_cache
def _cached_template(template_str: str, hass: ha.HomeAssistant) -> template.Template:
"""Return a cached template."""
return template.Template(template_str, hass)
class APITemplateView(HomeAssistantView):
"""View to handle Template requests."""
@@ -369,7 +362,7 @@ class APITemplateView(HomeAssistantView):
raise Unauthorized()
try:
data = await request.json()
tpl = _cached_template(data["template"], request.app["hass"])
tpl = template.Template(data["template"], request.app["hass"])
return tpl.async_render(variables=data.get("variables"), parse_result=False)
except (ValueError, TemplateError) as ex:
return self.json_message(

View File

@@ -13,7 +13,7 @@ from homeassistant.const import (
CONF_TYPE,
)
from homeassistant.core import CALLBACK_TYPE, Event, HassJob, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
from homeassistant.helpers.typing import ConfigType
@@ -32,11 +32,11 @@ async def async_get_triggers(
hass: HomeAssistant, device_id: str
) -> list[dict[str, str]]:
"""List device triggers for Arcam FMJ Receiver control devices."""
registry = er.async_get(hass)
registry = entity_registry.async_get(hass)
triggers = []
# Get all the integrations entities for this device
for entry in er.async_entries_for_device(registry, device_id):
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain == "media_player":
triggers.append(
{

View File

@@ -180,7 +180,7 @@ class ArestData:
self._resource = resource
self._pin = pin
self.data = {}
self.available = True
self._attr_available = True
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
@@ -201,7 +201,7 @@ class ArestData:
f"{self._resource}/digital/{self._pin}", timeout=10
)
self.data = {"value": response.json()["return_value"]}
self.available = True
self._attr_available = True
except requests.exceptions.ConnectionError:
_LOGGER.error("No route to device %s", self._resource)
self.available = False
self._attr_available = False

View File

@@ -33,7 +33,7 @@ def get_scanner(hass: HomeAssistant, config: ConfigType) -> ArrisDeviceScanner:
class ArrisDeviceScanner(DeviceScanner):
"""Class which queries a Arris TG2492LG router for connected devices."""
"""This class queries a Arris TG2492LG router for connected devices."""
def __init__(self, connect_box: ConnectBox) -> None:
"""Initialize the scanner."""

View File

@@ -42,7 +42,7 @@ def get_scanner(hass: HomeAssistant, config: ConfigType) -> ArubaDeviceScanner |
class ArubaDeviceScanner(DeviceScanner):
"""Class which queries a Aruba Access Point for connected devices."""
"""This class queries a Aruba Access Point for connected devices."""
def __init__(self, config):
"""Initialize the scanner."""

View File

@@ -1,6 +1,7 @@
"""Support for collecting data from the ARWN project."""
from __future__ import annotations
import json
import logging
from homeassistant.components import mqtt
@@ -10,7 +11,6 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import slugify
from homeassistant.util.json import json_loads_object
_LOGGER = logging.getLogger(__name__)
@@ -102,7 +102,7 @@ async def async_setup_platform(
"""Set up the ARWN platform."""
@callback
def async_sensor_event_received(msg: mqtt.ReceiveMessage) -> None:
def async_sensor_event_received(msg):
"""Process events as sensors.
When a new event on our topic (arwn/#) is received we map it
@@ -115,7 +115,7 @@ async def async_setup_platform(
This lets us dynamically incorporate sensors without any
configuration on our side.
"""
event = json_loads_object(msg.payload)
event = json.loads(msg.payload)
sensors = discover_sensors(msg.topic, event)
if not sensors:
return

View File

@@ -28,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==1.2.7", "yalexs-ble==2.1.14"]
"requirements": ["yalexs==1.2.7", "yalexs_ble==2.0.4"]
}

View File

@@ -38,7 +38,7 @@ class AugustSubscriberMixin:
def _async_setup_listeners(self):
"""Create interval and stop listeners."""
self._unsub_interval = async_track_time_interval(
self._hass, self._async_refresh, self._update_interval, "august refresh"
self._hass, self._async_refresh, self._update_interval
)
@callback

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/aurora",
"iot_class": "cloud_polling",
"loggers": ["auroranoaa"],
"requirements": ["auroranoaa==0.0.3"]
"requirements": ["auroranoaa==0.0.2"]
}

View File

@@ -1,5 +1,5 @@
"""Support for Aurora Forecast sensor."""
from homeassistant.components.sensor import SensorEntity, SensorStateClass
from homeassistant.components.sensor import SensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import PERCENTAGE
from homeassistant.core import HomeAssistant
@@ -28,7 +28,6 @@ class AuroraSensor(AuroraEntity, SensorEntity):
"""Implementation of an aurora sensor."""
_attr_native_unit_of_measurement = PERCENTAGE
_attr_state_class = SensorStateClass.MEASUREMENT
@property
def native_value(self):

View File

@@ -1,35 +1,9 @@
{
"title": "Automation",
"entity_component": {
"state": {
"_": {
"name": "[%key:component::automation::title%]",
"state": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
},
"state_attributes": {
"current": {
"name": "Running automations"
},
"id": {
"name": "ID"
},
"last_triggered": {
"name": "Last triggered"
},
"max": {
"name": "Max running automations"
},
"mode": {
"name": "Run mode",
"state": {
"parallel": "Parallel",
"queued": "Queued",
"restart": "Restart",
"single": "Single"
}
}
}
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
},
"issues": {

View File

@@ -1,7 +1,7 @@
"""Support for Awair sensors."""
from __future__ import annotations
from typing import Any, cast
from typing import cast
from python_awair.air_data import AirData
from python_awair.devices import AwairBaseDevice, AwairLocalDevice
@@ -156,7 +156,7 @@ class AwairSensor(CoordinatorEntity[AwairDataUpdateCoordinator], SensorEntity):
return round(state, 2)
@property
def extra_state_attributes(self) -> dict[str, Any]:
def extra_state_attributes(self) -> dict:
"""Return the Awair Index alongside state attributes.
The Awair Index is a subjective score ranging from 0-4 (inclusive) that
@@ -178,7 +178,7 @@ class AwairSensor(CoordinatorEntity[AwairDataUpdateCoordinator], SensorEntity):
https://docs.developer.getawair.com/?version=latest#awair-score-and-index
"""
sensor_type = self.entity_description.key
attrs: dict[str, Any] = {}
attrs: dict = {}
if not self._air_data:
return attrs
if sensor_type in self._air_data.indices:

View File

@@ -9,7 +9,7 @@ from pathlib import Path
import tarfile
from tarfile import TarError
from tempfile import TemporaryDirectory
from typing import Any, Protocol, cast
from typing import Any, Protocol
from securetar import SecureTarFile, atomic_contents_add
@@ -19,7 +19,6 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import integration_platform
from homeassistant.helpers.json import save_json
from homeassistant.util import dt
from homeassistant.util.json import json_loads_object
from .const import DOMAIN, EXCLUDE_FROM_BACKUP, LOGGER
@@ -101,11 +100,11 @@ class BackupManager:
try:
with tarfile.open(backup_path, "r:") as backup_file:
if data_file := backup_file.extractfile("./backup.json"):
data = json_loads_object(data_file.read())
data = json.loads(data_file.read())
backup = Backup(
slug=cast(str, data["slug"]),
name=cast(str, data["name"]),
date=cast(str, data["date"]),
slug=data["slug"],
name=data["name"],
date=data["date"],
path=backup_path,
size=round(backup_path.stat().st_size / 1_048_576, 2),
)
@@ -187,8 +186,13 @@ class BackupManager:
"compressed": True,
}
tar_file_path = Path(self.backup_dir, f"{backup_data['slug']}.tar")
size_in_bytes = await self.hass.async_add_executor_job(
self._mkdir_and_generate_backup_contents,
if not self.backup_dir.exists():
LOGGER.debug("Creating backup directory")
self.hass.async_add_executor_job(self.backup_dir.mkdir)
await self.hass.async_add_executor_job(
self._generate_backup_contents,
tar_file_path,
backup_data,
)
@@ -197,7 +201,7 @@ class BackupManager:
name=backup_name,
date=date_str,
path=tar_file_path,
size=round(size_in_bytes / 1_048_576, 2),
size=round(tar_file_path.stat().st_size / 1_048_576, 2),
)
if self.loaded_backups:
self.backups[slug] = backup
@@ -216,16 +220,12 @@ class BackupManager:
if isinstance(result, Exception):
raise result
def _mkdir_and_generate_backup_contents(
def _generate_backup_contents(
self,
tar_file_path: Path,
backup_data: dict[str, Any],
) -> int:
"""Generate backup contents and return the size."""
if not self.backup_dir.exists():
LOGGER.debug("Creating backup directory")
self.backup_dir.mkdir()
) -> None:
"""Generate backup contents."""
with TemporaryDirectory() as tmp_dir, SecureTarFile(
tar_file_path, "w", gzip=False
) as tar_file:
@@ -245,7 +245,6 @@ class BackupManager:
arcname="data",
)
tar_file.add(tmp_dir_path, arcname=".")
return tar_file_path.stat().st_size
def _generate_slug(date: str, name: str) -> str:

View File

@@ -5,7 +5,7 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/baf",
"iot_class": "local_push",
"requirements": ["aiobafi6==0.8.0"],
"requirements": ["aiobafi6==0.7.3"],
"zeroconf": [
{
"type": "_api._tcp.local.",

View File

@@ -39,7 +39,6 @@ AUTO_COMFORT_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="comfort_min_speed",
name="Auto Comfort Minimum Speed",
native_step=1,
native_min_value=0,
native_max_value=SPEED_RANGE[1] - 1,
entity_category=EntityCategory.CONFIG,
@@ -49,7 +48,6 @@ AUTO_COMFORT_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="comfort_max_speed",
name="Auto Comfort Maximum Speed",
native_step=1,
native_min_value=1,
native_max_value=SPEED_RANGE[1],
entity_category=EntityCategory.CONFIG,
@@ -59,7 +57,6 @@ AUTO_COMFORT_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="comfort_heat_assist_speed",
name="Auto Comfort Heat Assist Speed",
native_step=1,
native_min_value=SPEED_RANGE[0],
native_max_value=SPEED_RANGE[1],
entity_category=EntityCategory.CONFIG,
@@ -72,7 +69,6 @@ FAN_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="return_to_auto_timeout",
name="Return to Auto Timeout",
native_step=1,
native_min_value=ONE_MIN_SECS,
native_max_value=HALF_DAY_SECS,
entity_category=EntityCategory.CONFIG,
@@ -83,7 +79,6 @@ FAN_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="motion_sense_timeout",
name="Motion Sense Timeout",
native_step=1,
native_min_value=ONE_MIN_SECS,
native_max_value=ONE_DAY_SECS,
entity_category=EntityCategory.CONFIG,
@@ -97,7 +92,6 @@ LIGHT_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="light_return_to_auto_timeout",
name="Light Return to Auto Timeout",
native_step=1,
native_min_value=ONE_MIN_SECS,
native_max_value=HALF_DAY_SECS,
entity_category=EntityCategory.CONFIG,
@@ -108,7 +102,6 @@ LIGHT_NUMBER_DESCRIPTIONS = (
BAFNumberDescription(
key="light_auto_motion_timeout",
name="Light Motion Sense Timeout",
native_step=1,
native_min_value=ONE_MIN_SECS,
native_max_value=ONE_DAY_SECS,
entity_category=EntityCategory.CONFIG,

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/balboa",
"iot_class": "local_push",
"loggers": ["pybalboa"],
"requirements": ["pybalboa==1.0.1"]
"requirements": ["pybalboa==1.0.0"]
}

View File

@@ -60,7 +60,7 @@ from .const import (
DEFAULT_PROBABILITY_THRESHOLD,
)
from .helpers import Observation
from .issues import raise_mirrored_entries, raise_no_prob_given_false
from .repairs import raise_mirrored_entries, raise_no_prob_given_false
_LOGGER = logging.getLogger(__name__)

View File

@@ -1,8 +1,8 @@
"""Helpers for generating issues."""
"""Helpers for generating repairs."""
from __future__ import annotations
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers import issue_registry
from . import DOMAIN
from .helpers import Observation
@@ -15,13 +15,13 @@ def raise_mirrored_entries(
if len(observations) != 2:
return
if observations[0].is_mirror(observations[1]):
ir.async_create_issue(
issue_registry.async_create_issue(
hass,
DOMAIN,
"mirrored_entry/" + text,
breaks_in_ha_version="2022.10.0",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
severity=issue_registry.IssueSeverity.WARNING,
translation_key="manual_migration",
translation_placeholders={"entity": text},
learn_more_url="https://github.com/home-assistant/core/pull/67631",
@@ -31,13 +31,13 @@ def raise_mirrored_entries(
# Should deprecate in some future version (2022.10 at time of writing) & make prob_given_false required in schemas.
def raise_no_prob_given_false(hass: HomeAssistant, text: str) -> None:
"""In previous 2022.9 and earlier, prob_given_false was optional and had a default version."""
ir.async_create_issue(
issue_registry.async_create_issue(
hass,
DOMAIN,
f"no_prob_given_false/{text}",
breaks_in_ha_version="2022.10.0",
is_fixable=False,
severity=ir.IssueSeverity.ERROR,
severity=issue_registry.IssueSeverity.ERROR,
translation_key="no_prob_given_false",
translation_placeholders={"entity": text},
learn_more_url="https://github.com/home-assistant/core/pull/67631",

View File

@@ -42,7 +42,7 @@ Device = namedtuple("Device", ["mac", "name", "ip", "last_update"])
class BboxDeviceScanner(DeviceScanner):
"""Scanner for devices connected to the bbox."""
"""This class scans for devices connected to the bbox."""
def __init__(self, config):
"""Get host from config."""

View File

@@ -106,195 +106,114 @@
"turned_off": "{entity_name} turned off"
}
},
"entity_component": {
"_": {
"name": "[%key:component::binary_sensor::title%]",
"state": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
},
"state": {
"battery": {
"name": "Battery",
"state": {
"off": "Normal",
"on": "Low"
}
"off": "Normal",
"on": "Low"
},
"battery_charging": {
"name": "Charging",
"state": {
"off": "Not charging",
"on": "Charging"
}
"off": "Not charging",
"on": "Charging"
},
"carbon_monoxide": {
"name": "Carbon monoxide",
"state": {
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
}
"off": "[%key:component::binary_sensor::state::gas::off%]",
"on": "[%key:component::binary_sensor::state::gas::on%]"
},
"cold": {
"name": "Cold",
"state": {
"off": "[%key:component::binary_sensor::entity_component::battery::state::off%]",
"on": "Cold"
}
"off": "[%key:component::binary_sensor::state::battery::off%]",
"on": "Cold"
},
"connectivity": {
"name": "Connectivity",
"state": {
"off": "[%key:common::state::disconnected%]",
"on": "[%key:common::state::connected%]"
}
"off": "[%key:common::state::disconnected%]",
"on": "[%key:common::state::connected%]"
},
"door": {
"name": "Door",
"state": {
"off": "[%key:common::state::closed%]",
"on": "[%key:common::state::open%]"
}
"off": "[%key:common::state::closed%]",
"on": "[%key:common::state::open%]"
},
"garage_door": {
"name": "Garage door",
"state": {
"off": "[%key:common::state::closed%]",
"on": "[%key:common::state::open%]"
}
"off": "[%key:common::state::closed%]",
"on": "[%key:common::state::open%]"
},
"gas": {
"name": "Gas",
"state": {
"off": "Clear",
"on": "Detected"
}
"off": "Clear",
"on": "Detected"
},
"heat": {
"name": "Heat",
"state": {
"off": "[%key:component::binary_sensor::entity_component::battery::state::off%]",
"on": "Hot"
}
"off": "[%key:component::binary_sensor::state::battery::off%]",
"on": "Hot"
},
"light": {
"name": "Light",
"state": {
"off": "No light",
"on": "Light detected"
}
"off": "No light",
"on": "Light detected"
},
"lock": {
"name": "Lock",
"state": {
"off": "[%key:common::state::locked%]",
"on": "[%key:common::state::unlocked%]"
}
"off": "[%key:common::state::locked%]",
"on": "[%key:common::state::unlocked%]"
},
"moisture": {
"name": "Moisture",
"state": {
"off": "Dry",
"on": "Wet"
}
"off": "Dry",
"on": "Wet"
},
"motion": {
"name": "Motion",
"state": {
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
}
"off": "[%key:component::binary_sensor::state::gas::off%]",
"on": "[%key:component::binary_sensor::state::gas::on%]"
},
"moving": {
"name": "Moving",
"state": {
"off": "Not moving",
"on": "Moving"
}
"off": "Not moving",
"on": "Moving"
},
"occupancy": {
"name": "Occupancy",
"state": {
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
}
"off": "[%key:component::binary_sensor::state::gas::off%]",
"on": "[%key:component::binary_sensor::state::gas::on%]"
},
"opening": {
"name": "Opening",
"state": {
"off": "[%key:common::state::closed%]",
"on": "[%key:common::state::open%]"
}
"off": "[%key:common::state::closed%]",
"on": "[%key:common::state::open%]"
},
"plug": {
"name": "Plug",
"state": {
"off": "Unplugged",
"on": "Plugged in"
}
"off": "Unplugged",
"on": "Plugged in"
},
"presence": {
"name": "Presence",
"state": {
"off": "[%key:component::device_tracker::entity_component::_::state::not_home%]",
"on": "[%key:component::device_tracker::entity_component::_::state::home%]"
}
"off": "[%key:component::device_tracker::state::_::not_home%]",
"on": "[%key:component::device_tracker::state::_::home%]"
},
"problem": {
"name": "Problem",
"state": {
"off": "OK",
"on": "Problem"
}
"off": "OK",
"on": "Problem"
},
"running": {
"name": "Running",
"state": {
"off": "Not running",
"on": "Running"
}
"off": "Not running",
"on": "Running"
},
"safety": {
"name": "Safety",
"state": {
"off": "Safe",
"on": "Unsafe"
}
"off": "Safe",
"on": "Unsafe"
},
"smoke": {
"name": "Smoke",
"state": {
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
}
"off": "[%key:component::binary_sensor::state::gas::off%]",
"on": "[%key:component::binary_sensor::state::gas::on%]"
},
"sound": {
"name": "Sound",
"state": {
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
}
"off": "[%key:component::binary_sensor::state::gas::off%]",
"on": "[%key:component::binary_sensor::state::gas::on%]"
},
"update": {
"name": "Update",
"state": {
"off": "Up-to-date",
"on": "Update available"
}
"off": "Up-to-date",
"on": "Update available"
},
"vibration": {
"name": "Vibration",
"state": {
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
}
"off": "[%key:component::binary_sensor::state::gas::off%]",
"on": "[%key:component::binary_sensor::state::gas::on%]"
},
"window": {
"name": "Window",
"state": {
"off": "[%key:common::state::closed%]",
"on": "[%key:common::state::open%]"
}
"off": "[%key:common::state::closed%]",
"on": "[%key:common::state::open%]"
},
"_": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
},
"device_class": {

View File

@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/blackbird",
"iot_class": "local_polling",
"loggers": ["pyblackbird"],
"requirements": ["pyblackbird==0.6"]
"requirements": ["pyblackbird==0.5"]
}

View File

@@ -8,7 +8,6 @@ import blebox_uniapi.cover
from homeassistant.components.cover import (
ATTR_POSITION,
ATTR_TILT_POSITION,
CoverDeviceClass,
CoverEntity,
CoverEntityFeature,
@@ -68,10 +67,6 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity):
self._attr_supported_features = (
position | stop | CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
)
if feature.has_tilt:
self._attr_supported_features = (
self._attr_supported_features | CoverEntityFeature.SET_TILT_POSITION
)
@property
def current_cover_position(self) -> int | None:
@@ -82,12 +77,6 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity):
return None if position is None else 100 - position
@property
def current_cover_tilt_position(self) -> int | None:
"""Return the current tilt of shutter."""
position = self._feature.tilt_current
return None if position is None else 100 - position
@property
def is_opening(self) -> bool | None:
"""Return whether cover is opening."""
@@ -121,12 +110,6 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity):
"""Stop the cover."""
await self._feature.async_stop()
async def async_set_cover_tilt_position(self, **kwargs: Any) -> None:
"""Set the tilt position."""
position = kwargs[ATTR_TILT_POSITION]
await self._feature.async_set_tilt_position(100 - position)
def _is_state(self, state_name) -> bool | None:
value = BLEBOX_TO_HASS_COVER_STATES[self._feature.state]
return None if value is None else value == state_name

View File

@@ -143,7 +143,7 @@ class ActiveBluetoothDataUpdateCoordinator(
self._last_poll = monotonic_time_coarse()
if not self.last_poll_successful:
self.logger.debug("%s: Polling recovered", self.address)
self.logger.debug("%s: Polling recovered")
self.last_poll_successful = True
self._async_handle_bluetooth_poll()

View File

@@ -136,7 +136,7 @@ class ActiveBluetoothProcessorCoordinator(
self._last_poll = monotonic_time_coarse()
if not self.last_poll_successful:
self.logger.debug("%s: Polling recovered", self.address)
self.logger.debug("%s: Polling recovered")
self.last_poll_successful = True
for processor in self._processors:

View File

@@ -98,10 +98,7 @@ class BaseHaScanner(ABC):
self._start_time = self._last_detection = MONOTONIC_TIME()
if not self._cancel_watchdog:
self._cancel_watchdog = async_track_time_interval(
self.hass,
self._async_scanner_watchdog,
SCANNER_WATCHDOG_INTERVAL,
f"{self.name} Bluetooth scanner watchdog",
self.hass, self._async_scanner_watchdog, SCANNER_WATCHDOG_INTERVAL
)
@hass_callback
@@ -168,13 +165,13 @@ class BaseHaScanner(ABC):
"monotonic_time": MONOTONIC_TIME(),
"discovered_devices_and_advertisement_data": [
{
"name": device.name,
"address": device.address,
"rssi": advertisement_data.rssi,
"advertisement_data": advertisement_data,
"details": device.details,
"name": device_adv[0].name,
"address": device_adv[0].address,
"rssi": device_adv[0].rssi,
"advertisement_data": device_adv[1],
"details": device_adv[0].details,
}
for device, advertisement_data in device_adv_datas
for device_adv in device_adv_datas
],
}
@@ -227,27 +224,23 @@ class BaseHaRemoteScanner(BaseHaScanner):
self._async_expire_devices(dt_util.utcnow())
cancel_track = async_track_time_interval(
self.hass,
self._async_expire_devices,
timedelta(seconds=30),
f"{self.name} Bluetooth scanner device expire",
self.hass, self._async_expire_devices, timedelta(seconds=30)
)
cancel_stop = self.hass.bus.async_listen(
EVENT_HOMEASSISTANT_STOP, self._async_save_history
EVENT_HOMEASSISTANT_STOP, self._save_history
)
self._async_setup_scanner_watchdog()
@hass_callback
def _cancel() -> None:
self._async_save_history()
self._save_history()
self._async_stop_scanner_watchdog()
cancel_track()
cancel_stop()
return _cancel
@hass_callback
def _async_save_history(self, event: Event | None = None) -> None:
def _save_history(self, event: Event | None = None) -> None:
"""Save the history."""
self._storage.async_set_advertisement_history(
self.source,
@@ -259,7 +252,6 @@ class BaseHaRemoteScanner(BaseHaScanner):
),
)
@hass_callback
def _async_expire_devices(self, _datetime: datetime.datetime) -> None:
"""Expire old devices."""
now = MONOTONIC_TIME()
@@ -345,7 +337,7 @@ class BaseHaRemoteScanner(BaseHaScanner):
tx_power=NO_RSSI_VALUE if tx_power is None else tx_power,
platform_data=(),
)
device = BLEDevice(
device = BLEDevice( # type: ignore[no-untyped-call]
address=address,
name=local_name,
details=self._details | details,

View File

@@ -276,7 +276,6 @@ class BluetoothManager:
self.hass,
self._async_check_unavailable,
timedelta(seconds=UNAVAILABLE_TRACK_SECONDS),
"Bluetooth manager unavailable tracking",
)
@hass_callback

View File

@@ -15,11 +15,11 @@
],
"quality_scale": "internal",
"requirements": [
"bleak==0.20.1",
"bleak-retry-connector==3.0.2",
"bluetooth-adapters==0.15.3",
"bleak==0.19.5",
"bleak-retry-connector==2.13.0",
"bluetooth-adapters==0.15.2",
"bluetooth-auto-recovery==1.0.3",
"bluetooth-data-tools==0.3.1",
"dbus-fast==1.84.2"
"dbus-fast==1.84.1"
]
}

View File

@@ -91,16 +91,12 @@ def create_bleak_scanner(
"detection_callback": detection_callback,
"scanning_mode": SCANNING_MODE_TO_BLEAK[scanning_mode],
}
system = platform.system()
if system == "Linux":
if platform.system() == "Linux":
# Only Linux supports multiple adapters
if adapter:
scanner_kwargs["adapter"] = adapter
if scanning_mode == BluetoothScanningMode.PASSIVE:
scanner_kwargs["bluez"] = PASSIVE_SCANNER_ARGS
elif system == "Darwin":
# We want mac address on macOS
scanner_kwargs["cb"] = {"use_bdaddr": True}
_LOGGER.debug("Initializing bluetooth scanner with %s", scanner_kwargs)
try:

View File

@@ -224,28 +224,10 @@ class HaBleakClientWrapper(BleakClient):
self.__disconnected_callback = callback
if self._backend:
self._backend.set_disconnected_callback(
self._make_disconnected_callback(callback),
callback, # type: ignore[arg-type]
**kwargs,
)
def _make_disconnected_callback(
self, callback: Callable[[BleakClient], None] | None
) -> Callable[[], None] | None:
"""Make the disconnected callback.
https://github.com/hbldh/bleak/pull/1256
The disconnected callback needs to get the top level
BleakClientWrapper instance, not the backend instance.
The signature of the callback for the backend is:
Callable[[], None]
To make this work we need to wrap the callback in a partial
that passes the BleakClientWrapper instance as the first
argument.
"""
return None if callback is None else partial(callback, self)
async def connect(self, **kwargs: Any) -> bool:
"""Connect to the specified GATT server."""
assert models.MANAGER is not None
@@ -253,9 +235,7 @@ class HaBleakClientWrapper(BleakClient):
wrapped_backend = self._async_get_best_available_backend_and_device(manager)
self._backend = wrapped_backend.client(
wrapped_backend.device,
disconnected_callback=self._make_disconnected_callback(
self.__disconnected_callback
),
disconnected_callback=self.__disconnected_callback,
timeout=self.__timeout,
hass=manager.hass,
)

View File

@@ -1,7 +1,6 @@
"""Config flow for BMW ConnectedDrive integration."""
from __future__ import annotations
from collections.abc import Mapping
from typing import Any
from bimmer_connected.api.authentication import MyBMWAuthentication
@@ -56,61 +55,36 @@ class BMWConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
VERSION = 1
_reauth_entry: config_entries.ConfigEntry | None = None
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
unique_id = f"{user_input[CONF_REGION]}-{user_input[CONF_USERNAME]}"
if not self._reauth_entry:
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured()
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured()
info = None
try:
info = await validate_input(self.hass, user_input)
entry_data = {
**user_input,
CONF_REFRESH_TOKEN: info.get(CONF_REFRESH_TOKEN),
}
except CannotConnect:
errors["base"] = "cannot_connect"
if info:
if self._reauth_entry:
self.hass.config_entries.async_update_entry(
self._reauth_entry, data=entry_data
)
self.hass.async_create_task(
self.hass.config_entries.async_reload(
self._reauth_entry.entry_id
)
)
return self.async_abort(reason="reauth_successful")
return self.async_create_entry(
title=info["title"],
data=entry_data,
data={
**user_input,
CONF_REFRESH_TOKEN: info.get(CONF_REFRESH_TOKEN),
},
)
schema = self.add_suggested_values_to_schema(
DATA_SCHEMA, self._reauth_entry.data if self._reauth_entry else {}
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
return self.async_show_form(step_id="user", data_schema=schema, errors=errors)
async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult:
"""Handle configuration by re-auth."""
self._reauth_entry = self.hass.config_entries.async_get_entry(
self.context["entry_id"]
)
return await self.async_step_user()
@staticmethod
@callback
def async_get_options_flow(

View File

@@ -12,7 +12,6 @@ from httpx import HTTPError, HTTPStatusError, TimeoutException
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import CONF_READ_ONLY, CONF_REFRESH_TOKEN, DOMAIN
@@ -66,9 +65,8 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]):
401,
403,
):
# Clear refresh token only and trigger reauth
# Clear refresh token only on issues with authorization
self._update_config_entry_refresh_token(None)
raise ConfigEntryAuthFailed(str(err)) from err
raise UpdateFailed(f"Error communicating with BMW API: {err}") from err
if self.account.refresh_token != old_refresh_token:

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
"iot_class": "cloud_polling",
"loggers": ["bimmer_connected"],
"requirements": ["bimmer_connected==0.13.0"]
"requirements": ["bimmer_connected==0.12.1"]
}

View File

@@ -14,8 +14,7 @@
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
}
},
"options": {

View File

@@ -174,10 +174,7 @@ class BondEntity(Entity):
self._bpup_subs.subscribe(self._device_id, self._async_bpup_callback)
self.async_on_remove(
async_track_time_interval(
self.hass,
self._async_update_if_bpup_not_alive,
_FALLBACK_SCAN_INTERVAL,
f"Bond {self.entity_id} fallback polling",
self.hass, self._async_update_if_bpup_not_alive, _FALLBACK_SCAN_INTERVAL
)
)

View File

@@ -7,6 +7,6 @@
"iot_class": "local_push",
"loggers": ["bond_async"],
"quality_scale": "platinum",
"requirements": ["bond-async==0.1.23"],
"requirements": ["bond-async==0.1.22"],
"zeroconf": ["_bond._tcp.local."]
}

View File

@@ -7,7 +7,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pybravia"],
"requirements": ["pybravia==0.3.2"],
"requirements": ["pybravia==0.3.1"],
"ssdp": [
{
"st": "urn:schemas-sony-com:service:ScalarWebAPI:1",

View File

@@ -136,7 +136,7 @@ class BraviaTVMediaPlayer(BraviaTVEntity, MediaPlayerEntity):
async def async_browse_media(
self,
media_content_type: MediaType | str | None = None,
media_content_type: str | None = None,
media_content_id: str | None = None,
) -> BrowseMedia:
"""Browse apps and channels."""
@@ -231,7 +231,7 @@ class BraviaTVMediaPlayer(BraviaTVEntity, MediaPlayerEntity):
async def async_get_browse_image(
self,
media_content_type: MediaType | str,
media_content_type: str,
media_content_id: str,
media_image_id: str | None = None,
) -> tuple[bytes | None, str | None]:

View File

@@ -8,7 +8,7 @@
"iot_class": "local_polling",
"loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
"quality_scale": "platinum",
"requirements": ["brother==2.3.0"],
"requirements": ["brother==2.2.0"],
"zeroconf": [
{
"type": "_printer._tcp.local.",

View File

@@ -1,33 +0,0 @@
"""Adds constants for brottsplatskartan integration."""
import logging
LOGGER = logging.getLogger(__package__)
CONF_AREA = "area"
DEFAULT_NAME = "Brottsplatskartan"
AREAS = [
"N/A",
"Blekinge län",
"Dalarnas län",
"Gotlands län",
"Gävleborgs län",
"Hallands län",
"Jämtlands län",
"Jönköpings län",
"Kalmar län",
"Kronobergs län",
"Norrbottens län",
"Skåne län",
"Stockholms län",
"Södermanlands län",
"Uppsala län",
"Värmlands län",
"Västerbottens län",
"Västernorrlands län",
"Västmanlands län",
"Västra Götalands län",
"Örebro län",
"Östergötlands län",
]

View File

@@ -3,6 +3,7 @@ from __future__ import annotations
from collections import defaultdict
from datetime import timedelta
import logging
import uuid
import brottsplatskartan
@@ -15,10 +16,38 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import AREAS, CONF_AREA, DEFAULT_NAME, LOGGER
_LOGGER = logging.getLogger(__name__)
CONF_AREA = "area"
DEFAULT_NAME = "Brottsplatskartan"
SCAN_INTERVAL = timedelta(minutes=30)
AREAS = [
"Blekinge län",
"Dalarnas län",
"Gotlands län",
"Gävleborgs län",
"Hallands län",
"Jämtlands län",
"Jönköpings län",
"Kalmar län",
"Kronobergs län",
"Norrbottens län",
"Skåne län",
"Stockholms län",
"Södermanlands län",
"Uppsala län",
"Värmlands län",
"Västerbottens län",
"Västernorrlands län",
"Västmanlands län",
"Västra Götalands län",
"Örebro län",
"Östergötlands län",
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude,
@@ -70,7 +99,7 @@ class BrottsplatskartanSensor(SensorEntity):
incidents = self._brottsplatskartan.get_incidents()
if incidents is False:
LOGGER.debug("Problems fetching incidents")
_LOGGER.debug("Problems fetching incidents")
return
for incident in incidents:

View File

@@ -129,7 +129,7 @@ class BSBLANClimate(
return PRESET_ECO
return PRESET_NONE
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
async def async_set_hvac_mode(self, hvac_mode: str) -> None:
"""Set hvac mode."""
await self.async_set_data(hvac_mode=hvac_mode)

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/bsblan",
"iot_class": "local_polling",
"loggers": ["bsblan"],
"requirements": ["python-bsblan==0.5.11"]
"requirements": ["python-bsblan==0.5.9"]
}

View File

@@ -35,7 +35,7 @@ def get_scanner(
class BTHomeHub5DeviceScanner(DeviceScanner):
"""Class which queries a BT Home Hub 5."""
"""This class queries a BT Home Hub 5."""
def __init__(self, config):
"""Initialise the scanner."""

View File

@@ -54,7 +54,7 @@ _Device = namedtuple("_Device", ["ip_address", "mac", "host", "status", "name"])
class BTSmartHubScanner(DeviceScanner):
"""Class which queries a BT Smart Hub."""
"""This class queries a BT Smart Hub."""
def __init__(self, smarthub_client):
"""Initialise the scanner."""

View File

@@ -20,5 +20,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/bthome",
"iot_class": "local_push",
"requirements": ["bthome-ble==2.9.0"]
"requirements": ["bthome-ble==2.7.0"]
}

View File

@@ -117,7 +117,7 @@ SENSOR_DESCRIPTIONS = {
key=f"{BTHomeSensorDeviceClass.ENERGY}_{Units.ENERGY_KILO_WATT_HOUR}",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.TOTAL_INCREASING,
),
# Gas (m3)
(
@@ -127,7 +127,7 @@ SENSOR_DESCRIPTIONS = {
key=f"{BTHomeSensorDeviceClass.GAS}_{Units.VOLUME_CUBIC_METERS}",
device_class=SensorDeviceClass.GAS,
native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.TOTAL_INCREASING,
),
# Humidity in (percent)
(BTHomeSensorDeviceClass.HUMIDITY, Units.PERCENTAGE): SensorEntityDescription(
@@ -297,16 +297,6 @@ SENSOR_DESCRIPTIONS = {
native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR,
state_class=SensorStateClass.MEASUREMENT,
),
# Water (L)
(
BTHomeSensorDeviceClass.WATER,
Units.VOLUME_LITERS,
): SensorEntityDescription(
key=f"{BTHomeSensorDeviceClass.WATER}_{Units.VOLUME_LITERS}",
device_class=SensorDeviceClass.WATER,
native_unit_of_measurement=UnitOfVolume.LITERS,
state_class=SensorStateClass.TOTAL,
),
}

View File

@@ -11,7 +11,7 @@ from homeassistant.const import (
CONF_TYPE,
)
from homeassistant.core import Context, HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers import entity_registry
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
@@ -31,7 +31,7 @@ async def async_get_actions(
hass: HomeAssistant, device_id: str
) -> list[dict[str, str]]:
"""List device actions for button devices."""
registry = er.async_get(hass)
registry = entity_registry.async_get(hass)
return [
{
CONF_DEVICE_ID: device_id,
@@ -39,7 +39,7 @@ async def async_get_actions(
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "press",
}
for entry in er.async_entries_for_device(registry, device_id)
for entry in entity_registry.async_entries_for_device(registry, device_id)
if entry.domain == DOMAIN
]

View File

@@ -16,7 +16,7 @@ from homeassistant.const import (
CONF_TYPE,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
from homeassistant.helpers.typing import ConfigType
@@ -36,7 +36,7 @@ async def async_get_triggers(
hass: HomeAssistant, device_id: str
) -> list[dict[str, str]]:
"""List device triggers for button devices."""
registry = er.async_get(hass)
registry = entity_registry.async_get(hass)
return [
{
CONF_PLATFORM: "device",
@@ -45,7 +45,7 @@ async def async_get_triggers(
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "pressed",
}
for entry in er.async_entries_for_device(registry, device_id)
for entry in entity_registry.async_entries_for_device(registry, device_id)
if entry.domain == DOMAIN
]

View File

@@ -7,16 +7,5 @@
"action_type": {
"press": "Press {entity_name} button"
}
},
"entity_component": {
"_": {
"name": "[%key:component::button::title%]"
},
"restart": {
"name": "Restart"
},
"update": {
"name": "Update"
}
}
}

View File

@@ -356,10 +356,4 @@ class WebDavCalendarData:
else:
enddate = obj.dtstart.value + timedelta(days=1)
# End date for an all day event is exclusive. This fixes the case where
# an all day event has start and end values that are the same, or the event
# has a zero duration.
if not isinstance(enddate, datetime) and obj.dtstart.value == enddate:
enddate += timedelta(days=1)
return enddate

View File

@@ -42,7 +42,6 @@ from .const import (
EVENT_IN,
EVENT_IN_DAYS,
EVENT_IN_WEEKS,
EVENT_LOCATION,
EVENT_RECURRENCE_ID,
EVENT_RECURRENCE_RANGE,
EVENT_RRULE,
@@ -68,23 +67,6 @@ SCAN_INTERVAL = datetime.timedelta(seconds=60)
VALID_FREQS = {"DAILY", "WEEKLY", "MONTHLY", "YEARLY"}
def _has_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Assert that all datetime values have a timezone."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Validate that all datetime values have a timezone."""
for k in keys:
if (
(value := obj.get(k))
and isinstance(value, datetime.datetime)
and value.tzinfo is None
):
raise vol.Invalid("Expected all values to have a timezone")
return obj
return validate
def _has_consistent_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that all datetime values have a consistent timezone."""
@@ -107,7 +89,7 @@ def _as_local_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]
"""Convert all datetime values to the local timezone."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Convert all keys that are datetime values to local timezone."""
"""Test that all keys that are datetime values have the same timezone."""
for k in keys:
if (value := obj.get(k)) and isinstance(value, datetime.datetime):
obj[k] = dt.as_local(value)
@@ -116,59 +98,23 @@ def _as_local_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]
return validate
def _has_duration(
start_key: str, end_key: str
) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that the time span between start and end is positive."""
def _is_sorted(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that the specified values are sequential."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Test that all keys in the dict are in order."""
if (start := obj.get(start_key)) and (end := obj.get(end_key)):
duration = end - start
if duration.total_seconds() <= 0:
raise vol.Invalid(f"Expected positive event duration ({start}, {end})")
values = []
for k in keys:
if not (value := obj.get(k)):
return obj
values.append(value)
if all(values) and values != sorted(values):
raise vol.Invalid(f"Values were not in order: {values}")
return obj
return validate
def _has_same_type(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that all values are of the same type."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Test that all keys in the dict have values of the same type."""
uniq_values = groupby(type(obj[k]) for k in keys)
if len(list(uniq_values)) > 1:
raise vol.Invalid(f"Expected all values to be the same type: {keys}")
return obj
return validate
def _validate_rrule(value: Any) -> str:
"""Validate a recurrence rule string."""
if value is None:
raise vol.Invalid("rrule value is None")
if not isinstance(value, str):
raise vol.Invalid("rrule value expected a string")
try:
rrulestr(value)
except ValueError as err:
raise vol.Invalid(f"Invalid rrule: {str(err)}") from err
# Example format: FREQ=DAILY;UNTIL=...
rule_parts = dict(s.split("=", 1) for s in value.split(";"))
if not (freq := rule_parts.get("FREQ")):
raise vol.Invalid("rrule did not contain FREQ")
if freq not in VALID_FREQS:
raise vol.Invalid(f"Invalid frequency for rule: {value}")
return str(value)
CREATE_EVENT_SERVICE = "create_event"
CREATE_EVENT_SCHEMA = vol.All(
cv.has_at_least_one_key(EVENT_START_DATE, EVENT_START_DATETIME, EVENT_IN),
@@ -177,7 +123,6 @@ CREATE_EVENT_SCHEMA = vol.All(
{
vol.Required(EVENT_SUMMARY): cv.string,
vol.Optional(EVENT_DESCRIPTION, default=""): cv.string,
vol.Optional(EVENT_LOCATION): cv.string,
vol.Inclusive(
EVENT_START_DATE, "dates", "Start and end dates must both be specified"
): cv.date,
@@ -204,43 +149,8 @@ CREATE_EVENT_SCHEMA = vol.All(
),
_has_consistent_timezone(EVENT_START_DATETIME, EVENT_END_DATETIME),
_as_local_timezone(EVENT_START_DATETIME, EVENT_END_DATETIME),
_has_duration(EVENT_START_DATE, EVENT_END_DATE),
_has_duration(EVENT_START_DATETIME, EVENT_END_DATETIME),
)
WEBSOCKET_EVENT_SCHEMA = vol.Schema(
vol.All(
{
vol.Required(EVENT_START): vol.Any(cv.date, cv.datetime),
vol.Required(EVENT_END): vol.Any(cv.date, cv.datetime),
vol.Required(EVENT_SUMMARY): cv.string,
vol.Optional(EVENT_DESCRIPTION): cv.string,
vol.Optional(EVENT_LOCATION): cv.string,
vol.Optional(EVENT_RRULE): _validate_rrule,
},
_has_same_type(EVENT_START, EVENT_END),
_has_consistent_timezone(EVENT_START, EVENT_END),
_as_local_timezone(EVENT_START, EVENT_END),
_has_duration(EVENT_START, EVENT_END),
)
)
# Validation for the CalendarEvent dataclass
CALENDAR_EVENT_SCHEMA = vol.Schema(
vol.All(
{
vol.Required("start"): vol.Any(cv.date, cv.datetime),
vol.Required("end"): vol.Any(cv.date, cv.datetime),
vol.Required(EVENT_SUMMARY): cv.string,
vol.Optional(EVENT_RRULE): _validate_rrule,
},
_has_same_type("start", "end"),
_has_timezone("start", "end"),
_has_consistent_timezone("start", "end"),
_as_local_timezone("start", "end"),
_has_duration("start", "end"),
),
extra=vol.ALLOW_EXTRA,
_is_sorted(EVENT_START_DATE, EVENT_END_DATE),
_is_sorted(EVENT_START_DATETIME, EVENT_END_DATETIME),
)
@@ -333,19 +243,6 @@ class CalendarEvent:
"all_day": self.all_day,
}
def __post_init__(self) -> None:
"""Perform validation on the CalendarEvent."""
def skip_none(obj: Iterable[tuple[str, Any]]) -> dict[str, str]:
return {k: v for k, v in obj if v is not None}
try:
CALENDAR_EVENT_SCHEMA(dataclasses.asdict(self, dict_factory=skip_none))
except vol.Invalid as err:
raise HomeAssistantError(
f"Failed to validate CalendarEvent: {err}"
) from err
def _event_dict_factory(obj: Iterable[tuple[str, Any]]) -> dict[str, str]:
"""Convert CalendarEvent dataclass items to dictionary of attributes."""
@@ -419,6 +316,30 @@ def is_offset_reached(
return start + offset_time <= dt.now(start.tzinfo)
def _validate_rrule(value: Any) -> str:
"""Validate a recurrence rule string."""
if value is None:
raise vol.Invalid("rrule value is None")
if not isinstance(value, str):
raise vol.Invalid("rrule value expected a string")
try:
rrulestr(value)
except ValueError as err:
raise vol.Invalid(f"Invalid rrule: {str(err)}") from err
# Example format: FREQ=DAILY;UNTIL=...
rule_parts = dict(s.split("=", 1) for s in value.split(";"))
if not (freq := rule_parts.get("FREQ")):
raise vol.Invalid("rrule did not contain FREQ")
if freq not in VALID_FREQS:
raise vol.Invalid(f"Invalid frequency for rule: {value}")
return str(value)
class CalendarEntity(Entity):
"""Base class for calendar event entities."""
@@ -523,10 +444,9 @@ class CalendarEventView(http.HomeAssistantView):
try:
calendar_event_list = await entity.async_get_events(
request.app["hass"], dt.as_local(start_date), dt.as_local(end_date)
request.app["hass"], start_date, end_date
)
except HomeAssistantError as err:
_LOGGER.debug("Error reading events: %s", err)
return self.json_message(
f"Error reading events: {err}", HTTPStatus.INTERNAL_SERVER_ERROR
)
@@ -561,11 +481,38 @@ class CalendarListView(http.HomeAssistantView):
return self.json(sorted(calendar_list, key=lambda x: cast(str, x["name"])))
def _has_same_type(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that all values are of the same type."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Test that all keys in the dict have values of the same type."""
uniq_values = groupby(type(obj[k]) for k in keys)
if len(list(uniq_values)) > 1:
raise vol.Invalid(f"Expected all values to be the same type: {keys}")
return obj
return validate
@websocket_api.websocket_command(
{
vol.Required("type"): "calendar/event/create",
vol.Required("entity_id"): cv.entity_id,
CONF_EVENT: WEBSOCKET_EVENT_SCHEMA,
CONF_EVENT: vol.Schema(
vol.All(
{
vol.Required(EVENT_START): vol.Any(cv.date, cv.datetime),
vol.Required(EVENT_END): vol.Any(cv.date, cv.datetime),
vol.Required(EVENT_SUMMARY): cv.string,
vol.Optional(EVENT_DESCRIPTION): cv.string,
vol.Optional(EVENT_RRULE): _validate_rrule,
},
_has_same_type(EVENT_START, EVENT_END),
_has_consistent_timezone(EVENT_START, EVENT_END),
_as_local_timezone(EVENT_START, EVENT_END),
_is_sorted(EVENT_START, EVENT_END),
)
),
}
)
@websocket_api.async_response
@@ -648,7 +595,21 @@ async def handle_calendar_event_delete(
vol.Required(EVENT_UID): cv.string,
vol.Optional(EVENT_RECURRENCE_ID): cv.string,
vol.Optional(EVENT_RECURRENCE_RANGE): cv.string,
vol.Required(CONF_EVENT): WEBSOCKET_EVENT_SCHEMA,
vol.Required(CONF_EVENT): vol.Schema(
vol.All(
{
vol.Required(EVENT_START): vol.Any(cv.date, cv.datetime),
vol.Required(EVENT_END): vol.Any(cv.date, cv.datetime),
vol.Required(EVENT_SUMMARY): cv.string,
vol.Optional(EVENT_DESCRIPTION): cv.string,
vol.Optional(EVENT_RRULE): _validate_rrule,
},
_has_same_type(EVENT_START, EVENT_END),
_has_consistent_timezone(EVENT_START, EVENT_END),
_as_local_timezone(EVENT_START, EVENT_END),
_is_sorted(EVENT_START, EVENT_END),
)
),
}
)
@websocket_api.async_response

View File

@@ -1,11 +1,11 @@
"""Constants for calendar components."""
from enum import IntFlag
from enum import IntEnum
CONF_EVENT = "event"
class CalendarEntityFeature(IntFlag):
class CalendarEntityFeature(IntEnum):
"""Supported features of the calendar entity."""
CREATE_EVENT = 1

View File

@@ -46,9 +46,3 @@ create_event:
name: In
description: Days or weeks that you want to create the event in.
example: '{"days": 2} or {"weeks": 2}'
location:
name: Location
description: The location of the event.
example: "Conference Room - F123, Bldg. 002"
selector:
text:

View File

@@ -1,36 +1,9 @@
{
"title": "Calendar",
"entity_component": {
"state": {
"_": {
"name": "[%key:component::calendar::title%]",
"state": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
},
"state_attributes": {
"all_day": {
"name": "All day",
"state": {
"true": "[%key:common::state::yes%]",
"false": "[%key:common::state::no%]"
}
},
"description": {
"name": "Description"
},
"end_time": {
"name": "End time"
},
"location": {
"name": "Location"
},
"messages": {
"name": "Message"
},
"start_time": {
"name": "Start time"
}
}
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
}

Some files were not shown because too many files have changed in this diff