commit e8bdc7286e
Franck Nijhof, 2025-05-07 19:13:53 +02:00 (committed by GitHub)
1992 changed files with 112539 additions and 41381 deletions


@ -1,5 +1,6 @@
name: Report an issue with Home Assistant Core
description: Report an issue with Home Assistant Core.
type: Bug
body:
- type: markdown
attributes:


@ -32,7 +32,7 @@ jobs:
fetch-depth: 0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@ -116,7 +116,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@ -175,7 +175,7 @@ jobs:
sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
- name: Download translations
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.3.0
with:
name: translations
@ -324,7 +324,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Install Cosign
uses: sigstore/cosign-installer@v3.8.1
uses: sigstore/cosign-installer@v3.8.2
with:
cosign-release: "v2.2.3"
@ -457,12 +457,12 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download translations
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.3.0
with:
name: translations
@ -509,7 +509,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@ -522,7 +522,7 @@ jobs:
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@ -531,7 +531,7 @@ jobs:
- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}


@ -40,7 +40,7 @@ env:
CACHE_VERSION: 12
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 9
HA_SHORT_VERSION: "2025.4"
HA_SHORT_VERSION: "2025.5"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
@ -249,7 +249,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@ -294,7 +294,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@ -334,7 +334,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@ -374,7 +374,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@ -484,7 +484,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@ -587,7 +587,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@ -620,7 +620,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@ -653,7 +653,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Dependency review
uses: actions/dependency-review-action@v4.5.0
uses: actions/dependency-review-action@v4.6.0
with:
license-check: false # We use our own license audit checks
@ -677,7 +677,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@ -720,7 +720,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@ -767,7 +767,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@ -812,7 +812,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@ -889,7 +889,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@ -949,7 +949,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@ -968,7 +968,7 @@ jobs:
run: |
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
- name: Download pytest_buckets
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.3.0
with:
name: pytest_buckets
- name: Compile English translations
@ -1074,7 +1074,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@ -1208,7 +1208,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@ -1312,12 +1312,12 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.3.0
with:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'true'
uses: codecov/codecov-action@v5.4.0
uses: codecov/codecov-action@v5.4.2
with:
fail_ci_if_error: true
flags: full-suite
@ -1359,7 +1359,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@ -1454,12 +1454,12 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.3.0
with:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'false'
uses: codecov/codecov-action@v5.4.0
uses: codecov/codecov-action@v5.4.2
with:
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}
@ -1479,7 +1479,7 @@ jobs:
timeout-minutes: 10
steps:
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.3.0
with:
pattern: test-results-*
- name: Upload test results to Codecov


@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.28.13
uses: github/codeql-action/init@v3.28.16
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.28.13
uses: github/codeql-action/analyze@v3.28.16
with:
category: "/language:python"


@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}


@ -36,7 +36,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@ -138,17 +138,17 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Download env_file
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.3.0
with:
name: env_file
- name: Download build_constraints
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.3.0
with:
name: build_constraints
- name: Download requirements_diff
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.3.0
with:
name: requirements_diff
@ -187,22 +187,22 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Download env_file
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.3.0
with:
name: env_file
- name: Download build_constraints
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.3.0
with:
name: build_constraints
- name: Download requirements_diff
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.3.0
with:
name: requirements_diff
- name: Download requirements_all_wheels
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.3.0
with:
name: requirements_all_wheels


@ -291,6 +291,7 @@ homeassistant.components.kaleidescape.*
homeassistant.components.knocki.*
homeassistant.components.knx.*
homeassistant.components.kraken.*
homeassistant.components.kulersky.*
homeassistant.components.lacrosse.*
homeassistant.components.lacrosse_view.*
homeassistant.components.lamarzocco.*
@ -362,8 +363,10 @@ homeassistant.components.no_ip.*
homeassistant.components.nordpool.*
homeassistant.components.notify.*
homeassistant.components.notion.*
homeassistant.components.ntfy.*
homeassistant.components.number.*
homeassistant.components.nut.*
homeassistant.components.ohme.*
homeassistant.components.onboarding.*
homeassistant.components.oncue.*
homeassistant.components.onedrive.*
@ -383,6 +386,7 @@ homeassistant.components.pandora.*
homeassistant.components.panel_custom.*
homeassistant.components.peblar.*
homeassistant.components.peco.*
homeassistant.components.pegel_online.*
homeassistant.components.persistent_notification.*
homeassistant.components.person.*
homeassistant.components.pi_hole.*
@ -459,6 +463,7 @@ homeassistant.components.slack.*
homeassistant.components.sleepiq.*
homeassistant.components.smhi.*
homeassistant.components.smlight.*
homeassistant.components.smtp.*
homeassistant.components.snooz.*
homeassistant.components.solarlog.*
homeassistant.components.sonarr.*

CODEOWNERS (generated)

@ -171,6 +171,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/avea/ @pattyland
/homeassistant/components/awair/ @ahayworth @danielsjf
/tests/components/awair/ @ahayworth @danielsjf
/homeassistant/components/aws_s3/ @tomasbedrich
/tests/components/aws_s3/ @tomasbedrich
/homeassistant/components/axis/ @Kane610
/tests/components/axis/ @Kane610
/homeassistant/components/azure_data_explorer/ @kaareseras
@ -432,7 +434,7 @@ build.json @home-assistant/supervisor
/homeassistant/components/entur_public_transport/ @hfurubotten
/homeassistant/components/environment_canada/ @gwww @michaeldavie
/tests/components/environment_canada/ @gwww @michaeldavie
/homeassistant/components/ephember/ @ttroy50
/homeassistant/components/ephember/ @ttroy50 @roberty99
/homeassistant/components/epic_games_store/ @hacf-fr @Quentame
/tests/components/epic_games_store/ @hacf-fr @Quentame
/homeassistant/components/epion/ @lhgravendeel
@ -704,6 +706,8 @@ build.json @home-assistant/supervisor
/tests/components/image_upload/ @home-assistant/core
/homeassistant/components/imap/ @jbouwh
/tests/components/imap/ @jbouwh
/homeassistant/components/imeon_inverter/ @Imeon-Energy
/tests/components/imeon_inverter/ @Imeon-Energy
/homeassistant/components/imgw_pib/ @bieniu
/tests/components/imgw_pib/ @bieniu
/homeassistant/components/improv_ble/ @emontnemery
@ -935,6 +939,8 @@ build.json @home-assistant/supervisor
/tests/components/metoffice/ @MrHarcombe @avee87
/homeassistant/components/microbees/ @microBeesTech
/tests/components/microbees/ @microBeesTech
/homeassistant/components/miele/ @astrandb
/tests/components/miele/ @astrandb
/homeassistant/components/mikrotik/ @engrbm87
/tests/components/mikrotik/ @engrbm87
/homeassistant/components/mill/ @danielhiversen
@ -1047,6 +1053,8 @@ build.json @home-assistant/supervisor
/tests/components/nsw_fuel_station/ @nickw444
/homeassistant/components/nsw_rural_fire_service_feed/ @exxamalte
/tests/components/nsw_rural_fire_service_feed/ @exxamalte
/homeassistant/components/ntfy/ @tr4nt0r
/tests/components/ntfy/ @tr4nt0r
/homeassistant/components/nuheat/ @tstabrawa
/tests/components/nuheat/ @tstabrawa
/homeassistant/components/nuki/ @pschmitt @pvizeli @pree
@ -1075,8 +1083,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/ombi/ @larssont
/homeassistant/components/onboarding/ @home-assistant/core
/tests/components/onboarding/ @home-assistant/core
/homeassistant/components/oncue/ @bdraco @peterager
/tests/components/oncue/ @bdraco @peterager
/homeassistant/components/ondilo_ico/ @JeromeHXP
/tests/components/ondilo_ico/ @JeromeHXP
/homeassistant/components/onedrive/ @zweckj
@ -1254,6 +1260,8 @@ build.json @home-assistant/supervisor
/tests/components/recovery_mode/ @home-assistant/core
/homeassistant/components/refoss/ @ashionky
/tests/components/refoss/ @ashionky
/homeassistant/components/rehlko/ @bdraco @peterager
/tests/components/rehlko/ @bdraco @peterager
/homeassistant/components/remote/ @home-assistant/core
/tests/components/remote/ @home-assistant/core
/homeassistant/components/remote_calendar/ @Thomas55555
@ -1387,7 +1395,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/siren/ @home-assistant/core @raman325
/tests/components/siren/ @home-assistant/core @raman325
/homeassistant/components/sisyphus/ @jkeljo
/homeassistant/components/sky_hub/ @rogerselwyn
/homeassistant/components/sky_remote/ @dunnmj @saty9
/tests/components/sky_remote/ @dunnmj @saty9
/homeassistant/components/skybell/ @tkdrob
@ -1434,8 +1441,8 @@ build.json @home-assistant/supervisor
/tests/components/solarlog/ @Ernst79 @dontinelli
/homeassistant/components/solax/ @squishykid @Darsstar
/tests/components/solax/ @squishykid @Darsstar
/homeassistant/components/soma/ @ratsept @sebfortier2288
/tests/components/soma/ @ratsept @sebfortier2288
/homeassistant/components/soma/ @ratsept
/tests/components/soma/ @ratsept
/homeassistant/components/sonarr/ @ctalkington
/tests/components/sonarr/ @ctalkington
/homeassistant/components/songpal/ @rytilahti @shenxn
@ -1467,7 +1474,8 @@ build.json @home-assistant/supervisor
/tests/components/steam_online/ @tkdrob
/homeassistant/components/steamist/ @bdraco
/tests/components/steamist/ @bdraco
/homeassistant/components/stiebel_eltron/ @fucm
/homeassistant/components/stiebel_eltron/ @fucm @ThyMYthOS
/tests/components/stiebel_eltron/ @fucm @ThyMYthOS
/homeassistant/components/stookwijzer/ @fwestenberg
/tests/components/stookwijzer/ @fwestenberg
/homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
@ -1670,8 +1678,8 @@ build.json @home-assistant/supervisor
/tests/components/vlc_telnet/ @rodripf @MartinHjelmare
/homeassistant/components/vodafone_station/ @paoloantinori @chemelli74
/tests/components/vodafone_station/ @paoloantinori @chemelli74
/homeassistant/components/voip/ @balloob @synesthesiam
/tests/components/voip/ @balloob @synesthesiam
/homeassistant/components/voip/ @balloob @synesthesiam @jaminh
/tests/components/voip/ @balloob @synesthesiam @jaminh
/homeassistant/components/volumio/ @OnFreund
/tests/components/volumio/ @OnFreund
/homeassistant/components/volvooncall/ @molobrakos

Dockerfile (generated)

@ -31,7 +31,7 @@ RUN \
&& go2rtc --version
# Install uv
RUN pip3 install uv==0.6.10
RUN pip3 install uv==0.7.1
WORKDIR /usr/src


@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.02.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.02.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.02.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.02.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.02.1
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.05.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.05.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.05.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.05.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.05.0
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io


@ -53,6 +53,7 @@ from .components import (
logbook as logbook_pre_import, # noqa: F401
lovelace as lovelace_pre_import, # noqa: F401
onboarding as onboarding_pre_import, # noqa: F401
person as person_pre_import, # noqa: F401
recorder as recorder_import, # noqa: F401 - not named pre_import since it has requirements
repairs as repairs_pre_import, # noqa: F401
search as search_pre_import, # noqa: F401
@ -859,8 +860,14 @@ async def _async_set_up_integrations(
integrations, all_integrations = await _async_resolve_domains_and_preload(
hass, config
)
all_domains = set(all_integrations)
domains = set(integrations)
# Detect all cycles
integrations_after_dependencies = (
await loader.resolve_integrations_after_dependencies(
hass, all_integrations.values(), set(all_integrations)
)
)
all_domains = set(integrations_after_dependencies)
domains = set(integrations) & all_domains
_LOGGER.info(
"Domains to be set up: %s | %s",
@ -868,6 +875,8 @@ async def _async_set_up_integrations(
all_domains - domains,
)
async_set_domains_to_be_loaded(hass, all_domains)
# Initialize recorder
if "recorder" in all_domains:
recorder.async_initialize_recorder(hass)
@ -900,24 +909,12 @@ async def _async_set_up_integrations(
stage_dep_domains_unfiltered = {
dep
for domain in stage_domains
for dep in all_integrations[domain].all_dependencies
for dep in integrations_after_dependencies[domain]
if dep not in stage_domains
}
stage_dep_domains = stage_dep_domains_unfiltered - hass.config.components
stage_all_domains = stage_domains | stage_dep_domains
stage_all_integrations = {
domain: all_integrations[domain] for domain in stage_all_domains
}
# Detect all cycles
stage_integrations_after_dependencies = (
await loader.resolve_integrations_after_dependencies(
hass, stage_all_integrations.values(), stage_all_domains
)
)
stage_all_domains = set(stage_integrations_after_dependencies)
stage_domains &= stage_all_domains
stage_dep_domains &= stage_all_domains
_LOGGER.info(
"Setting up stage %s: %s | %s\nDependencies: %s | %s",
@ -928,8 +925,6 @@ async def _async_set_up_integrations(
stage_dep_domains_unfiltered - stage_dep_domains,
)
async_set_domains_to_be_loaded(hass, stage_all_domains)
if timeout is None:
await _async_setup_multi_components(hass, stage_all_domains, config)
continue
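
Aside: the rewritten stage loop above derives each stage's extra dependencies from the cycle-checked integrations_after_dependencies mapping instead of re-resolving cycles per stage. A minimal sketch of that set arithmetic, with hypothetical names (not the actual Home Assistant implementation):

# Sketch: expand one setup stage with its not-yet-loaded dependencies.
# after_deps maps domain -> transitive dependencies and is assumed to
# already exclude integrations dropped during cycle resolution.
def stage_domain_sets(
    stage_domains: set[str],
    after_deps: dict[str, set[str]],
    already_loaded: set[str],
) -> tuple[set[str], set[str]]:
    deps_unfiltered = {
        dep
        for domain in stage_domains
        for dep in after_deps.get(domain, set())
        if dep not in stage_domains
    }
    stage_dep_domains = deps_unfiltered - already_loaded
    return stage_domains | stage_dep_domains, stage_dep_domains

# "sqlite" is already loaded, so only the stage itself remains.
assert stage_domain_sets(
    {"recorder", "http"},
    {"recorder": {"http", "sqlite"}, "http": set()},
    already_loaded={"sqlite"},
)[0] == {"recorder", "http"}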


@ -1,5 +1,12 @@
 {
 "domain": "amazon",
 "name": "Amazon",
-"integrations": ["alexa", "amazon_polly", "aws", "fire_tv", "route53"]
+"integrations": [
+"alexa",
+"amazon_polly",
+"aws",
+"aws_s3",
+"fire_tv",
+"route53"
+]
}


@ -6,6 +6,7 @@
"google_assistant_sdk",
"google_cloud",
"google_drive",
"google_gemini",
"google_generative_ai_conversation",
"google_mail",
"google_maps",


@ -0,0 +1,6 @@
{
"domain": "nuki",
"name": "Nuki",
"integrations": ["nuki"],
"iot_standards": ["matter"]
}


@ -67,6 +67,7 @@ POLLEN_CATEGORY_MAP = {
2: "moderate",
3: "high",
4: "very_high",
5: "extreme",
}
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
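
Aside: with the new category 5, a value-to-label lookup stays a plain dict access. A tiny sketch (the 1: "low" entry is assumed from context; the excerpt above starts at 2):

# Sketch: resolve an AccuWeather pollen category index to its label,
# returning None for unknown or missing indexes.
POLLEN_CATEGORY_MAP = {1: "low", 2: "moderate", 3: "high", 4: "very_high", 5: "extreme"}

def pollen_level(value: int | None) -> str | None:
    return POLLEN_CATEGORY_MAP.get(value) if value is not None else None

assert pollen_level(5) == "extreme"
assert pollen_level(99) is None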


@ -72,10 +72,11 @@
 "level": {
 "name": "Level",
 "state": {
-"high": "High",
-"low": "Low",
+"extreme": "Extreme",
+"high": "[%key:common::state::high%]",
+"low": "[%key:common::state::low%]",
 "moderate": "Moderate",
-"very_high": "Very high"
+"very_high": "[%key:common::state::very_high%]"
}
}
}
@ -89,10 +90,11 @@
 "level": {
 "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
 "state": {
-"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
-"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
+"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
+"high": "[%key:common::state::high%]",
+"low": "[%key:common::state::low%]",
 "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
-"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
+"very_high": "[%key:common::state::very_high%]"
}
}
}
@ -123,10 +125,11 @@
 "level": {
 "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
 "state": {
-"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
-"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
+"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
+"high": "[%key:common::state::high%]",
+"low": "[%key:common::state::low%]",
 "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
-"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
+"very_high": "[%key:common::state::very_high%]"
}
}
}
@ -167,10 +170,11 @@
 "level": {
 "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
 "state": {
-"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
-"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
+"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
+"high": "[%key:common::state::high%]",
+"low": "[%key:common::state::low%]",
 "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
-"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
+"very_high": "[%key:common::state::very_high%]"
}
}
}
@ -181,10 +185,11 @@
 "level": {
 "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
 "state": {
-"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
-"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
+"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
+"high": "[%key:common::state::high%]",
+"low": "[%key:common::state::low%]",
 "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
-"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
+"very_high": "[%key:common::state::very_high%]"
}
}
}
@ -195,10 +200,11 @@
 "level": {
 "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
 "state": {
-"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
-"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
+"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
+"high": "[%key:common::state::high%]",
+"low": "[%key:common::state::low%]",
 "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
-"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
+"very_high": "[%key:common::state::very_high%]"
}
}
}


@ -2,25 +2,38 @@
from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .const import CONNECTION_TYPE, LOCAL
from .coordinator import AdaxCloudCoordinator, AdaxConfigEntry, AdaxLocalCoordinator
PLATFORMS = [Platform.CLIMATE]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:
"""Set up Adax from a config entry."""
if entry.data.get(CONNECTION_TYPE) == LOCAL:
local_coordinator = AdaxLocalCoordinator(hass, entry)
entry.runtime_data = local_coordinator
else:
cloud_coordinator = AdaxCloudCoordinator(hass, entry)
entry.runtime_data = cloud_coordinator
await entry.runtime_data.async_config_entry_first_refresh()
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
async def async_migrate_entry(
hass: HomeAssistant, config_entry: AdaxConfigEntry
) -> bool:
"""Migrate old entry."""
# convert title and unique_id to string
if config_entry.version == 1:
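
Aside: the switch from ConfigEntry to AdaxConfigEntry is what types entry.runtime_data. A short sketch of the pattern, assuming the alias and coordinator classes from the new coordinator module shown later in this diff:

# Sketch: a parametrized ConfigEntry makes entry.runtime_data type-safe,
# so platforms read the coordinator back without casts or hass.data.
from homeassistant.config_entries import ConfigEntry
# AdaxCloudCoordinator / AdaxLocalCoordinator come from .coordinator (added below)

type AdaxConfigEntry = ConfigEntry[AdaxCloudCoordinator | AdaxLocalCoordinator]

def get_cloud_coordinator(entry: AdaxConfigEntry) -> AdaxCloudCoordinator | None:
    data = entry.runtime_data  # typed as the union above
    return data if isinstance(data, AdaxCloudCoordinator) else None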


@ -12,57 +12,42 @@ from homeassistant.components.climate import (
ClimateEntityFeature,
HVACMode,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_TOKEN,
CONF_UNIQUE_ID,
PRECISION_WHOLE,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import ACCOUNT_ID, CONNECTION_TYPE, DOMAIN, LOCAL
from . import AdaxConfigEntry
from .const import CONNECTION_TYPE, DOMAIN, LOCAL
from .coordinator import AdaxCloudCoordinator, AdaxLocalCoordinator
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: AdaxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Adax thermostat with config flow."""
if entry.data.get(CONNECTION_TYPE) == LOCAL:
adax_data_handler = AdaxLocal(
entry.data[CONF_IP_ADDRESS],
entry.data[CONF_TOKEN],
websession=async_get_clientsession(hass, verify_ssl=False),
)
local_coordinator = cast(AdaxLocalCoordinator, entry.runtime_data)
async_add_entities(
[LocalAdaxDevice(adax_data_handler, entry.data[CONF_UNIQUE_ID])], True
[LocalAdaxDevice(local_coordinator, entry.data[CONF_UNIQUE_ID])],
)
return
adax_data_handler = Adax(
entry.data[ACCOUNT_ID],
entry.data[CONF_PASSWORD],
websession=async_get_clientsession(hass),
)
else:
cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)
async_add_entities(
(
AdaxDevice(room, adax_data_handler)
for room in await adax_data_handler.get_rooms()
),
True,
AdaxDevice(cloud_coordinator, device_id)
for device_id in cloud_coordinator.data
)
class AdaxDevice(ClimateEntity):
class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
"""Representation of a heater."""
_attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF]
@ -76,20 +61,37 @@ class AdaxDevice(ClimateEntity):
_attr_target_temperature_step = PRECISION_WHOLE
_attr_temperature_unit = UnitOfTemperature.CELSIUS
def __init__(self, heater_data: dict[str, Any], adax_data_handler: Adax) -> None:
def __init__(
self,
coordinator: AdaxCloudCoordinator,
device_id: str,
) -> None:
"""Initialize the heater."""
self._device_id = heater_data["id"]
self._adax_data_handler = adax_data_handler
super().__init__(coordinator)
self._adax_data_handler: Adax = coordinator.adax_data_handler
self._device_id = device_id
self._attr_unique_id = f"{heater_data['homeId']}_{heater_data['id']}"
self._attr_name = self.room["name"]
self._attr_unique_id = f"{self.room['homeId']}_{self._device_id}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, heater_data["id"])},
identifiers={(DOMAIN, device_id)},
# Instead of setting the device name to the entity name, adax
# should be updated to set has_entity_name = True, and set the entity
# name to None
name=cast(str | None, self.name),
manufacturer="Adax",
)
self._apply_data(self.room)
@property
def available(self) -> bool:
"""Whether the entity is available or not."""
return super().available and self._device_id in self.coordinator.data
@property
def room(self) -> dict[str, Any]:
"""Gets the data for this particular device."""
return self.coordinator.data[self._device_id]
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set hvac mode."""
@ -104,7 +106,9 @@ class AdaxDevice(ClimateEntity):
)
else:
return
await self._adax_data_handler.update()
# Request data refresh from source to verify that update was successful
await self.coordinator.async_request_refresh()
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
@ -114,12 +118,15 @@ class AdaxDevice(ClimateEntity):
self._device_id, temperature, True
)
async def async_update(self) -> None:
"""Get the latest data."""
for room in await self._adax_data_handler.get_rooms():
if room["id"] != self._device_id:
continue
self._attr_name = room["name"]
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if room := self.room:
self._apply_data(room)
super()._handle_coordinator_update()
def _apply_data(self, room: dict[str, Any]) -> None:
"""Update the appropriate attributes based on received data."""
self._attr_current_temperature = room.get("temperature")
self._attr_target_temperature = room.get("targetTemperature")
if room["heatingEnabled"]:
@ -128,14 +135,14 @@ class AdaxDevice(ClimateEntity):
else:
self._attr_hvac_mode = HVACMode.OFF
self._attr_icon = "mdi:radiator-off"
return
class LocalAdaxDevice(ClimateEntity):
class LocalAdaxDevice(CoordinatorEntity[AdaxLocalCoordinator], ClimateEntity):
"""Representation of a heater."""
_attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF]
_attr_hvac_mode = HVACMode.HEAT
_attr_hvac_mode = HVACMode.OFF
_attr_icon = "mdi:radiator-off"
_attr_max_temp = 35
_attr_min_temp = 5
_attr_supported_features = (
@ -146,9 +153,10 @@ class LocalAdaxDevice(ClimateEntity):
_attr_target_temperature_step = PRECISION_WHOLE
_attr_temperature_unit = UnitOfTemperature.CELSIUS
def __init__(self, adax_data_handler: AdaxLocal, unique_id: str) -> None:
def __init__(self, coordinator: AdaxLocalCoordinator, unique_id: str) -> None:
"""Initialize the heater."""
self._adax_data_handler = adax_data_handler
super().__init__(coordinator)
self._adax_data_handler: AdaxLocal = coordinator.adax_data_handler
self._attr_unique_id = unique_id
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, unique_id)},
@ -169,9 +177,10 @@ class LocalAdaxDevice(ClimateEntity):
return
await self._adax_data_handler.set_target_temperature(temperature)
async def async_update(self) -> None:
"""Get the latest data."""
data = await self._adax_data_handler.get_status()
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if data := self.coordinator.data:
self._attr_current_temperature = data["current_temperature"]
self._attr_available = self._attr_current_temperature is not None
if (target_temp := data["target_temperature"]) == 0:
@ -183,3 +192,5 @@ class LocalAdaxDevice(ClimateEntity):
self._attr_hvac_mode = HVACMode.HEAT
self._attr_icon = "mdi:radiator"
self._attr_target_temperature = target_temp
super()._handle_coordinator_update()
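
Aside: these entities now follow the coordinator push model: writes go to the device, then a refresh is requested so every entity converges on the device's confirmed state. A minimal sketch of that write-then-refresh pattern (entity and API names illustrative):

# Sketch: write-then-refresh with a shared coordinator. The refresh
# request is debounced, so several quick writes trigger a single poll.
from homeassistant.helpers.update_coordinator import CoordinatorEntity

class SketchHeater(CoordinatorEntity):
    async def async_turn_on_heating(self) -> None:
        await self._api.set_heating(True)  # hypothetical device write
        await self.coordinator.async_request_refresh()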


@ -1,5 +1,6 @@
"""Constants for the Adax integration."""
import datetime
from typing import Final
ACCOUNT_ID: Final = "account_id"
@ -9,3 +10,5 @@ DOMAIN: Final = "adax"
LOCAL = "Local"
WIFI_SSID = "wifi_ssid"
WIFI_PSWD = "wifi_pswd"
SCAN_INTERVAL = datetime.timedelta(seconds=60)


@ -0,0 +1,71 @@
"""DataUpdateCoordinator for the Adax component."""
import logging
from typing import Any, cast
from adax import Adax
from adax_local import Adax as AdaxLocal
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import ACCOUNT_ID, SCAN_INTERVAL
_LOGGER = logging.getLogger(__name__)
type AdaxConfigEntry = ConfigEntry[AdaxCloudCoordinator | AdaxLocalCoordinator]
class AdaxCloudCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]):
"""Coordinator for updating data to and from Adax (cloud)."""
def __init__(self, hass: HomeAssistant, entry: AdaxConfigEntry) -> None:
"""Initialize the Adax coordinator used for Cloud mode."""
super().__init__(
hass,
config_entry=entry,
logger=_LOGGER,
name="AdaxCloud",
update_interval=SCAN_INTERVAL,
)
self.adax_data_handler = Adax(
entry.data[ACCOUNT_ID],
entry.data[CONF_PASSWORD],
websession=async_get_clientsession(hass),
)
async def _async_update_data(self) -> dict[str, dict[str, Any]]:
"""Fetch data from the Adax."""
rooms = await self.adax_data_handler.get_rooms() or []
return {r["id"]: r for r in rooms}
class AdaxLocalCoordinator(DataUpdateCoordinator[dict[str, Any] | None]):
"""Coordinator for updating data to and from Adax (local)."""
def __init__(self, hass: HomeAssistant, entry: AdaxConfigEntry) -> None:
"""Initialize the Adax coordinator used for Local mode."""
super().__init__(
hass,
config_entry=entry,
logger=_LOGGER,
name="AdaxLocal",
update_interval=SCAN_INTERVAL,
)
self.adax_data_handler = AdaxLocal(
entry.data[CONF_IP_ADDRESS],
entry.data[CONF_TOKEN],
websession=async_get_clientsession(hass, verify_ssl=False),
)
async def _async_update_data(self) -> dict[str, Any]:
"""Fetch data from the Adax."""
if result := await self.adax_data_handler.get_status():
return cast(dict[str, Any], result)
raise UpdateFailed("Got invalid status from device")
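
Aside on the failure path, assuming standard DataUpdateCoordinator semantics: UpdateFailed raised in _async_update_data does not bubble out of a scheduled poll; it marks the refresh as failed, and availability of every attached CoordinatorEntity follows from that.

# Sketch: async_refresh logs UpdateFailed instead of raising, and sets
# last_update_success, which CoordinatorEntity.available mirrors.
async def poll_once(coordinator: AdaxLocalCoordinator) -> bool:
    await coordinator.async_refresh()
    return coordinator.last_update_success  # False -> entities unavailable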


@ -68,8 +68,8 @@
"led_bar_mode": {
"name": "LED bar mode",
"state": {
"off": "Off",
"co2": "Carbon dioxide",
"off": "[%key:common::state::off%]",
"co2": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
"pm": "Particulate matter"
}
},
@ -143,8 +143,8 @@
"led_bar_mode": {
"name": "[%key:component::airgradient::entity::select::led_bar_mode::name%]",
"state": {
"off": "[%key:component::airgradient::entity::select::led_bar_mode::state::off%]",
"co2": "[%key:component::airgradient::entity::select::led_bar_mode::state::co2%]",
"off": "[%key:common::state::off%]",
"co2": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
"pm": "[%key:component::airgradient::entity::select::led_bar_mode::state::pm%]"
}
},


@ -16,8 +16,8 @@
 "data": {
 "api_key": "[%key:common::config_flow::data::api_key%]",
 "city": "City",
-"country": "Country",
-"state": "State"
+"state": "State",
+"country": "[%key:common::config_flow::data::country%]"
}
},
"reauth_confirm": {
@ -56,12 +56,12 @@
 "sensor": {
 "pollutant_label": {
 "state": {
-"co": "Carbon monoxide",
-"n2": "Nitrogen dioxide",
-"o3": "Ozone",
-"p1": "PM10",
-"p2": "PM2.5",
-"s2": "Sulfur dioxide"
+"co": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
+"n2": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]",
+"o3": "[%key:component::sensor::entity_component::ozone::name%]",
+"p1": "[%key:component::sensor::entity_component::pm10::name%]",
+"p2": "[%key:component::sensor::entity_component::pm25::name%]",
+"s2": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
}
},
"pollutant_level": {


@ -9,6 +9,8 @@ from aioairzone.const import (
AZD_HUMIDITY,
AZD_TEMP,
AZD_TEMP_UNIT,
AZD_THERMOSTAT_BATTERY,
AZD_THERMOSTAT_SIGNAL,
AZD_WEBSERVER,
AZD_WIFI_RSSI,
AZD_ZONES,
@ -73,6 +75,20 @@ ZONE_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
device_class=SensorDeviceClass.BATTERY,
key=AZD_THERMOSTAT_BATTERY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
key=AZD_THERMOSTAT_SIGNAL,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="thermostat_signal",
),
)


@ -76,6 +76,9 @@
"sensor": {
"rssi": {
"name": "RSSI"
},
"thermostat_signal": {
"name": "Signal strength"
}
}
}


@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.6.11"]
"requirements": ["aioairzone-cloud==0.6.12"]
}


@ -32,9 +32,9 @@
"air_quality": {
"name": "Air Quality mode",
"state": {
"off": "Off",
"on": "On",
"auto": "Auto"
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]",
"auto": "[%key:common::state::auto%]"
}
},
"modes": {


@ -719,7 +719,7 @@ class LockCapabilities(AlexaEntity):
yield Alexa(self.entity)
@ENTITY_ADAPTERS.register(media_player.const.DOMAIN)
@ENTITY_ADAPTERS.register(media_player.DOMAIN)
class MediaPlayerCapabilities(AlexaEntity):
"""Class to represent MediaPlayer capabilities."""
@ -757,9 +757,7 @@ class MediaPlayerCapabilities(AlexaEntity):
if supported & media_player.MediaPlayerEntityFeature.SELECT_SOURCE:
inputs = AlexaInputController.get_valid_inputs(
self.entity.attributes.get(
media_player.const.ATTR_INPUT_SOURCE_LIST, []
)
self.entity.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST, [])
)
if len(inputs) > 0:
yield AlexaInputController(self.entity)
@ -776,8 +774,7 @@ class MediaPlayerCapabilities(AlexaEntity):
and domain != "denonavr"
):
inputs = AlexaEqualizerController.get_valid_inputs(
self.entity.attributes.get(media_player.const.ATTR_SOUND_MODE_LIST)
or []
self.entity.attributes.get(media_player.ATTR_SOUND_MODE_LIST) or []
)
if len(inputs) > 0:
yield AlexaEqualizerController(self.entity)
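
Aside: the capability checks above (supported & FEATURE) work because media player features are an IntFlag bitmask, so feature discovery is plain bit arithmetic:

from homeassistant.components.media_player import MediaPlayerEntityFeature

supported = (
    MediaPlayerEntityFeature.SELECT_SOURCE | MediaPlayerEntityFeature.VOLUME_SET
)
assert supported & MediaPlayerEntityFeature.SELECT_SOURCE
assert not supported & MediaPlayerEntityFeature.SELECT_SOUND_MODE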


@ -566,7 +566,7 @@ async def async_api_set_volume(
data: dict[str, Any] = {
ATTR_ENTITY_ID: entity.entity_id,
media_player.const.ATTR_MEDIA_VOLUME_LEVEL: volume,
media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
}
await hass.services.async_call(
@ -589,7 +589,7 @@ async def async_api_select_input(
# Attempt to map the ALL UPPERCASE payload name to a source.
# Strips trailing 1 to match single input devices.
source_list = entity.attributes.get(media_player.const.ATTR_INPUT_SOURCE_LIST) or []
source_list = entity.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST) or []
for source in source_list:
formatted_source = (
source.lower().replace("-", "").replace("_", "").replace(" ", "")
@ -611,7 +611,7 @@ async def async_api_select_input(
data: dict[str, Any] = {
ATTR_ENTITY_ID: entity.entity_id,
media_player.const.ATTR_INPUT_SOURCE: media_input,
media_player.ATTR_INPUT_SOURCE: media_input,
}
await hass.services.async_call(
@ -636,7 +636,7 @@ async def async_api_adjust_volume(
volume_delta = int(directive.payload["volume"])
entity = directive.entity
current_level = entity.attributes[media_player.const.ATTR_MEDIA_VOLUME_LEVEL]
current_level = entity.attributes[media_player.ATTR_MEDIA_VOLUME_LEVEL]
# read current state
try:
@ -648,7 +648,7 @@ async def async_api_adjust_volume(
data: dict[str, Any] = {
ATTR_ENTITY_ID: entity.entity_id,
media_player.const.ATTR_MEDIA_VOLUME_LEVEL: volume,
media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
}
await hass.services.async_call(
@ -709,7 +709,7 @@ async def async_api_set_mute(
entity = directive.entity
data: dict[str, Any] = {
ATTR_ENTITY_ID: entity.entity_id,
media_player.const.ATTR_MEDIA_VOLUME_MUTED: mute,
media_player.ATTR_MEDIA_VOLUME_MUTED: mute,
}
await hass.services.async_call(
@ -1708,15 +1708,13 @@ async def async_api_changechannel(
data: dict[str, Any] = {
ATTR_ENTITY_ID: entity.entity_id,
media_player.const.ATTR_MEDIA_CONTENT_ID: channel,
media_player.const.ATTR_MEDIA_CONTENT_TYPE: (
media_player.const.MEDIA_TYPE_CHANNEL
),
media_player.ATTR_MEDIA_CONTENT_ID: channel,
media_player.ATTR_MEDIA_CONTENT_TYPE: (media_player.MediaType.CHANNEL),
}
await hass.services.async_call(
entity.domain,
media_player.const.SERVICE_PLAY_MEDIA,
media_player.SERVICE_PLAY_MEDIA,
data,
blocking=False,
context=context,
@ -1825,13 +1823,13 @@ async def async_api_set_eq_mode(
context: ha.Context,
) -> AlexaResponse:
"""Process a SetMode request for EqualizerController."""
mode = directive.payload["mode"]
mode: str = directive.payload["mode"]
entity = directive.entity
data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id}
sound_mode_list = entity.attributes.get(media_player.const.ATTR_SOUND_MODE_LIST)
sound_mode_list = entity.attributes.get(media_player.ATTR_SOUND_MODE_LIST)
if sound_mode_list and mode.lower() in sound_mode_list:
data[media_player.const.ATTR_SOUND_MODE] = mode.lower()
data[media_player.ATTR_SOUND_MODE] = mode.lower()
else:
msg = f"failed to map sound mode {mode} to a mode on {entity.entity_id}"
raise AlexaInvalidValueError(msg)
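
Aside: every hunk in this file drops the ".const" hop because the media_player package re-exports its constants. Sketch of the pattern with a toy layout (illustrative, not the actual package structure):

# mypkg/const.py
#     ATTR_MEDIA_VOLUME_LEVEL = "volume_level"
# mypkg/__init__.py
#     from .const import ATTR_MEDIA_VOLUME_LEVEL  # re-export
#
# Callers then need only the package import:
from homeassistant.components import media_player

data = {media_player.ATTR_MEDIA_VOLUME_LEVEL: 0.5}  # no media_player.const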


@ -3,10 +3,10 @@
from __future__ import annotations
from asyncio import timeout
from collections.abc import Mapping
from http import HTTPStatus
import json
import logging
from types import MappingProxyType
from typing import TYPE_CHECKING, Any, cast
from uuid import uuid4
@ -260,10 +260,10 @@ async def async_enable_proactive_mode(
def extra_significant_check(
hass: HomeAssistant,
old_state: str,
old_attrs: dict[Any, Any] | MappingProxyType[Any, Any],
old_attrs: Mapping[Any, Any],
old_extra_arg: Any,
new_state: str,
new_attrs: dict[str, Any] | MappingProxyType[Any, Any],
new_attrs: Mapping[Any, Any],
new_extra_arg: Any,
) -> bool:
"""Check if the serialized data has changed."""

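Aside: the signature change works because collections.abc.Mapping is the common read-only supertype of both plain dicts and the MappingProxyType views used for state attributes, so one annotation covers both:

from collections.abc import Mapping
from types import MappingProxyType

def attrs_changed(old: Mapping[str, object], new: Mapping[str, object]) -> bool:
    # One signature accepts dict and MappingProxyType alike.
    return dict(old) != dict(new)

assert not attrs_changed({"a": 1}, MappingProxyType({"a": 1}))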

@ -6,5 +6,5 @@
"iot_class": "cloud_push",
"loggers": ["boto3", "botocore", "s3transfer"],
"quality_scale": "legacy",
"requirements": ["boto3==1.34.131"]
"requirements": ["boto3==1.37.1"]
}


@ -3,12 +3,12 @@
"step": {
"user": {
"data": {
"tracked_addons": "Addons",
"tracked_addons": "Add-ons",
"tracked_integrations": "Integrations",
"tracked_custom_integrations": "Custom integrations"
},
"data_description": {
"tracked_addons": "Select the addons you want to track",
"tracked_addons": "Select the add-ons you want to track",
"tracked_integrations": "Select the integrations you want to track",
"tracked_custom_integrations": "Select the custom integrations you want to track"
}


@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/android_ip_webcam",
"iot_class": "local_polling",
"requirements": ["pydroid-ipcam==2.0.0"]
"requirements": ["pydroid-ipcam==3.0.0"]
}


@ -73,7 +73,7 @@ class AndroidTVRemoteBaseEntity(Entity):
self._api.send_key_command(key_code, direction)
except ConnectionClosed as exc:
raise HomeAssistantError(
"Connection to Android TV device is closed"
translation_domain=DOMAIN, translation_key="connection_closed"
) from exc
def _send_launch_app_command(self, app_link: str) -> None:
@ -85,5 +85,5 @@ class AndroidTVRemoteBaseEntity(Entity):
self._api.send_launch_app_command(app_link)
except ConnectionClosed as exc:
raise HomeAssistantError(
"Connection to Android TV device is closed"
translation_domain=DOMAIN, translation_key="connection_closed"
) from exc


@ -21,7 +21,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AndroidTVRemoteConfigEntry
from .const import CONF_APP_ICON, CONF_APP_NAME
from .const import CONF_APP_ICON, CONF_APP_NAME, DOMAIN
from .entity import AndroidTVRemoteBaseEntity
PARALLEL_UPDATES = 0
@ -233,5 +233,5 @@ class AndroidTVRemoteMediaPlayerEntity(AndroidTVRemoteBaseEntity, MediaPlayerEnt
await asyncio.sleep(delay_secs)
except ConnectionClosed as exc:
raise HomeAssistantError(
"Connection to Android TV device is closed"
translation_domain=DOMAIN, translation_key="connection_closed"
) from exc


@ -54,5 +54,10 @@
}
}
}
},
"exceptions": {
"connection_closed": {
"message": "Connection to the Android TV device is closed"
}
}
}
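
Aside: the raise sites in the earlier hunks and this strings.json block pair up through the translation key. Sketch of the mechanism, assuming standard translated-exception behavior:

from homeassistant.exceptions import HomeAssistantError

DOMAIN = "androidtv_remote"  # as in the integration's const module

def raise_connection_closed() -> None:
    # Home Assistant resolves the user-facing message from
    # exceptions.connection_closed.message in strings.json, per language.
    raise HomeAssistantError(
        translation_domain=DOMAIN,
        translation_key="connection_closed",
    )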


@ -2,6 +2,7 @@
from __future__ import annotations
from collections.abc import Mapping
from functools import partial
import logging
from types import MappingProxyType
@ -52,7 +53,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
RECOMMENDED_OPTIONS = {
CONF_RECOMMENDED: True,
CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
}
@ -134,9 +135,8 @@ class AnthropicOptionsFlow(OptionsFlow):
if user_input is not None:
if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
if user_input[CONF_LLM_HASS_API] == "none":
user_input.pop(CONF_LLM_HASS_API)
if not user_input.get(CONF_LLM_HASS_API):
user_input.pop(CONF_LLM_HASS_API, None)
if user_input.get(
CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
@ -151,12 +151,16 @@ class AnthropicOptionsFlow(OptionsFlow):
options = {
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
CONF_PROMPT: user_input[CONF_PROMPT],
CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
CONF_LLM_HASS_API: user_input.get(CONF_LLM_HASS_API),
}
suggested_values = options.copy()
if not suggested_values.get(CONF_PROMPT):
suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT
if (
suggested_llm_apis := suggested_values.get(CONF_LLM_HASS_API)
) and isinstance(suggested_llm_apis, str):
suggested_values[CONF_LLM_HASS_API] = [suggested_llm_apis]
schema = self.add_suggested_values_to_schema(
vol.Schema(anthropic_config_option_schema(self.hass, options)),
@ -172,28 +176,22 @@ class AnthropicOptionsFlow(OptionsFlow):
def anthropic_config_option_schema(
hass: HomeAssistant,
options: dict[str, Any] | MappingProxyType[str, Any],
options: Mapping[str, Any],
) -> dict:
"""Return a schema for Anthropic completion options."""
hass_apis: list[SelectOptionDict] = [
SelectOptionDict(
-label="No control",
-value="none",
-)
-]
-hass_apis.extend(
-SelectOptionDict(
 label=api.name,
 value=api.id,
 )
 for api in llm.async_get_apis(hass)
-)
+]
schema = {
vol.Optional(CONF_PROMPT): TemplateSelector(),
vol.Optional(CONF_LLM_HASS_API, default="none"): SelectSelector(
SelectSelectorConfig(options=hass_apis)
),
vol.Optional(
CONF_LLM_HASS_API,
): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
vol.Required(
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
): bool,
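
Aside: CONF_LLM_HASS_API moves from a single string (with a "none" sentinel and a "No control" option) to a list backing the multiple=True selector. A hypothetical normalizer for previously stored values, mirroring the str-to-list conversion above (the "none" handling is inferred from the removed code):

def normalize_llm_apis(stored: str | list[str] | None) -> list[str]:
    # "none" was the old no-control sentinel; absence now means the same.
    if stored is None or stored == "none":
        return []
    return [stored] if isinstance(stored, str) else stored

assert normalize_llm_apis("assist") == ["assist"]
assert normalize_llm_apis(None) == []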


@ -9,11 +9,13 @@ from anthropic import AsyncStream
from anthropic._types import NOT_GIVEN
from anthropic.types import (
InputJSONDelta,
MessageDeltaUsage,
MessageParam,
MessageStreamEvent,
RawContentBlockDeltaEvent,
RawContentBlockStartEvent,
RawContentBlockStopEvent,
RawMessageDeltaEvent,
RawMessageStartEvent,
RawMessageStopEvent,
RedactedThinkingBlock,
@ -31,6 +33,7 @@ from anthropic.types import (
ToolResultBlockParam,
ToolUseBlock,
ToolUseBlockParam,
Usage,
)
from voluptuous_openapi import convert
@ -162,7 +165,8 @@ def _convert_content(
return messages
async def _transform_stream(
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
chat_log: conversation.ChatLog,
result: AsyncStream[MessageStreamEvent],
messages: list[MessageParam],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
@ -207,6 +211,7 @@ async def _transform_stream(
| None
) = None
current_tool_args: str
input_usage: Usage | None = None
async for response in result:
LOGGER.debug("Received response: %s", response)
@ -215,6 +220,7 @@ async def _transform_stream(
if response.message.role != "assistant":
raise ValueError("Unexpected message role")
current_message = MessageParam(role=response.message.role, content=[])
input_usage = response.message.usage
elif isinstance(response, RawContentBlockStartEvent):
if isinstance(response.content_block, ToolUseBlock):
current_block = ToolUseBlockParam(
@ -265,32 +271,54 @@ async def _transform_stream(
if current_block is None:
raise ValueError("Unexpected stop event without a current block")
if current_block["type"] == "tool_use":
tool_block = cast(ToolUseBlockParam, current_block)
# tool block
tool_args = json.loads(current_tool_args) if current_tool_args else {}
tool_block["input"] = tool_args
current_block["input"] = tool_args
yield {
"tool_calls": [
llm.ToolInput(
id=tool_block["id"],
tool_name=tool_block["name"],
id=current_block["id"],
tool_name=current_block["name"],
tool_args=tool_args,
)
]
}
elif current_block["type"] == "thinking":
thinking_block = cast(ThinkingBlockParam, current_block)
LOGGER.debug("Thinking: %s", thinking_block["thinking"])
# thinking block
LOGGER.debug("Thinking: %s", current_block["thinking"])
if current_message is None:
raise ValueError("Unexpected stop event without a current message")
current_message["content"].append(current_block) # type: ignore[union-attr]
current_block = None
elif isinstance(response, RawMessageDeltaEvent):
if (usage := response.usage) is not None:
chat_log.async_trace(_create_token_stats(input_usage, usage))
elif isinstance(response, RawMessageStopEvent):
if current_message is not None:
messages.append(current_message)
current_message = None
def _create_token_stats(
input_usage: Usage | None, response_usage: MessageDeltaUsage
) -> dict[str, Any]:
"""Create token stats for conversation agent tracing."""
input_tokens = 0
cached_input_tokens = 0
if input_usage:
input_tokens = input_usage.input_tokens
cached_input_tokens = input_usage.cache_creation_input_tokens or 0
output_tokens = response_usage.output_tokens
return {
"stats": {
"input_tokens": input_tokens,
"cached_input_tokens": cached_input_tokens,
"output_tokens": output_tokens,
}
}
class AnthropicConversationEntity(
conversation.ConversationEntity, conversation.AbstractConversationAgent
):
@ -393,7 +421,8 @@ class AnthropicConversationEntity(
[
content
async for content in chat_log.async_add_delta_content_stream(
user_input.agent_id, _transform_stream(stream, messages)
user_input.agent_id,
_transform_stream(chat_log, stream, messages),
)
if not isinstance(content, conversation.AssistantContent)
]
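
Aside: _create_token_stats emits one stats payload per model response into the chat log trace. A hypothetical consumer aggregating those payloads (shape taken from the function above, aggregation itself is illustrative):

def total_tokens(trace_events: list[dict]) -> int:
    return sum(
        event["stats"]["input_tokens"]
        + event["stats"]["cached_input_tokens"]
        + event["stats"]["output_tokens"]
        for event in trace_events
    )

assert total_tokens(
    [{"stats": {"input_tokens": 10, "cached_input_tokens": 2, "output_tokens": 5}}]
) == 17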


@ -53,10 +53,8 @@ class OnlineStatus(CoordinatorEntity[APCUPSdCoordinator], BinarySensorEntity):
"""Initialize the APCUPSd binary device."""
super().__init__(coordinator, context=description.key.upper())
# Set up unique id and device info if serial number is available.
if (serial_no := coordinator.data.serial_no) is not None:
self._attr_unique_id = f"{serial_no}_{description.key}"
self.entity_description = description
self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
self._attr_device_info = coordinator.device_info
@property


@ -85,11 +85,16 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
self._host = host
self._port = port
@property
def unique_device_id(self) -> str:
"""Return a unique ID of the device, which is the serial number (if available) or the config entry ID."""
return self.data.serial_no or self.config_entry.entry_id
@property
def device_info(self) -> DeviceInfo:
"""Return the DeviceInfo of this APC UPS, if serial number is available."""
return DeviceInfo(
identifiers={(DOMAIN, self.data.serial_no or self.config_entry.entry_id)},
identifiers={(DOMAIN, self.unique_device_id)},
model=self.data.model,
manufacturer="APC",
name=self.data.name or "APC UPS",
@ -108,4 +113,7 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
data = await aioapcaccess.request_status(self._host, self._port)
return APCUPSdData(data)
except (OSError, asyncio.IncompleteReadError) as error:
raise UpdateFailed(error) from error
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_connect",
) from error
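
Aside: unique_device_id centralizes the serial-or-entry-id fallback that the sensor and binary sensor hunks now compose into f"{unique_device_id}_{key}". The rule in isolation:

# Sketch: fall back to the stable config entry ID when the UPS does not
# report a serial number, keeping unique_ids stable across restarts.
def unique_device_id(serial_no: str | None, entry_id: str) -> str:
    return serial_no or entry_id

assert unique_device_id(None, "0123abcd") == "0123abcd"
assert unique_device_id("AS1604234129", "0123abcd") == "AS1604234129"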


@ -458,11 +458,8 @@ class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity):
"""Initialize the sensor."""
super().__init__(coordinator=coordinator, context=description.key.upper())
# Set up unique id and device info if serial number is available.
if (serial_no := coordinator.data.serial_no) is not None:
self._attr_unique_id = f"{serial_no}_{description.key}"
self.entity_description = description
self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
self._attr_device_info = coordinator.device_info
# Initial update of attributes.


@ -93,7 +93,7 @@
"name": "Internal temperature"
},
"last_self_test": {
"name": "Last self test"
"name": "Last self-test"
},
"last_transfer": {
"name": "Last transfer"
@ -177,7 +177,7 @@
"name": "Restore requirement"
},
"self_test_result": {
"name": "Self test result"
"name": "Self-test result"
},
"sensitivity": {
"name": "Sensitivity"
@ -195,7 +195,7 @@
"name": "Status"
},
"self_test_interval": {
"name": "Self test interval"
"name": "Self-test interval"
},
"time_left": {
"name": "Time left"
@ -219,5 +219,10 @@
"name": "Transfer to battery"
}
}
},
"exceptions": {
"cannot_connect": {
"message": "Cannot connect to APC UPS Daemon."
}
}
}


@ -43,6 +43,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
config_entry: ApSystemsConfigEntry
device_version: str
battery_system: bool
def __init__(
self,
@ -68,6 +69,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
self.api.max_power = device_info.maxPower
self.api.min_power = device_info.minPower
self.device_version = device_info.devVer
self.battery_system = device_info.isBatterySystem
async def _async_update_data(self) -> ApSystemsSensorData:
try:


@ -6,5 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/apsystems",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["apsystems-ez1==2.4.0"]
"loggers": ["APsystemsEZ1"],
"requirements": ["apsystems-ez1==2.6.0"]
}


@ -21,7 +21,7 @@
"entity": {
"binary_sensor": {
"off_grid_status": {
"name": "Off grid status"
"name": "Off-grid status"
},
"dc_1_short_circuit_error_status": {
"name": "DC 1 short circuit error status"


@ -36,6 +36,8 @@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity):
super().__init__(data)
self._api = data.coordinator.api
self._attr_unique_id = f"{data.device_id}_inverter_status"
if data.coordinator.battery_system:
self._attr_available = False
async def async_update(self) -> None:
"""Update switch status and availability."""


@ -36,9 +36,9 @@
"wi_fi_strength": {
"name": "Wi-Fi strength",
"state": {
"low": "Low",
"medium": "Medium",
"high": "High"
"low": "[%key:common::state::low%]",
"medium": "[%key:common::state::medium%]",
"high": "[%key:common::state::high%]"
}
}
}


@ -26,7 +26,7 @@
"sensor": {
"threshold": {
"state": {
"error": "Error",
"error": "[%key:common::state::error%]",
"green": "Green",
"yellow": "Yellow",
"red": "Red"

View File

@@ -2,10 +2,9 @@
from __future__ import annotations
from collections.abc import Callable
from collections.abc import Callable, Mapping
from datetime import datetime, timedelta
import logging
from types import MappingProxyType
from typing import Any
from pyasuswrt import AsusWrtError
@@ -363,7 +362,7 @@ class AsusWrtRouter:
"""Add a function to call when router is closed."""
self._on_close.append(func)
def update_options(self, new_options: MappingProxyType[str, Any]) -> bool:
def update_options(self, new_options: Mapping[str, Any]) -> bool:
"""Update router options."""
req_reload = False
for name, new_opt in new_options.items():

View File

@@ -28,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.7"]
"requirements": ["yalexs==8.10.0", "yalexs-ble==2.6.0"]
}

View File

@@ -18,6 +18,7 @@ from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_MODE,
ATTR_NAME,
CONF_ACTIONS,
CONF_ALIAS,
CONF_CONDITIONS,
CONF_DEVICE_ID,
@@ -27,6 +28,7 @@ from homeassistant.const import (
CONF_MODE,
CONF_PATH,
CONF_PLATFORM,
CONF_TRIGGERS,
CONF_VARIABLES,
CONF_ZONE,
EVENT_HOMEASSISTANT_STARTED,
@@ -86,11 +88,9 @@ from homeassistant.util.hass_dict import HassKey
from .config import AutomationConfig, ValidationStatus
from .const import (
CONF_ACTIONS,
CONF_INITIAL_STATE,
CONF_TRACE,
CONF_TRIGGER_VARIABLES,
CONF_TRIGGERS,
DEFAULT_INITIAL_STATE,
DOMAIN,
LOGGER,

View File

@@ -14,11 +14,15 @@ from homeassistant.components import blueprint
from homeassistant.components.trace import TRACE_CONFIG_SCHEMA
from homeassistant.config import config_per_platform, config_without_domain
from homeassistant.const import (
CONF_ACTION,
CONF_ACTIONS,
CONF_ALIAS,
CONF_CONDITION,
CONF_CONDITIONS,
CONF_DESCRIPTION,
CONF_ID,
CONF_TRIGGER,
CONF_TRIGGERS,
CONF_VARIABLES,
)
from homeassistant.core import HomeAssistant
@@ -30,14 +34,10 @@ from homeassistant.helpers.typing import ConfigType
from homeassistant.util.yaml.input import UndefinedSubstitution
from .const import (
CONF_ACTION,
CONF_ACTIONS,
CONF_HIDE_ENTITY,
CONF_INITIAL_STATE,
CONF_TRACE,
CONF_TRIGGER,
CONF_TRIGGER_VARIABLES,
CONF_TRIGGERS,
DOMAIN,
LOGGER,
)
@@ -58,34 +58,9 @@ _MINIMAL_PLATFORM_SCHEMA = vol.Schema(
def _backward_compat_schema(value: Any | None) -> Any:
"""Backward compatibility for automations."""
if not isinstance(value, dict):
return value
# `trigger` has been renamed to `triggers`
if CONF_TRIGGER in value:
if CONF_TRIGGERS in value:
raise vol.Invalid(
"Cannot specify both 'trigger' and 'triggers'. Please use 'triggers' only."
)
value[CONF_TRIGGERS] = value.pop(CONF_TRIGGER)
# `condition` has been renamed to `conditions`
if CONF_CONDITION in value:
if CONF_CONDITIONS in value:
raise vol.Invalid(
"Cannot specify both 'condition' and 'conditions'. Please use 'conditions' only."
)
value[CONF_CONDITIONS] = value.pop(CONF_CONDITION)
# `action` has been renamed to `actions`
if CONF_ACTION in value:
if CONF_ACTIONS in value:
raise vol.Invalid(
"Cannot specify both 'action' and 'actions'. Please use 'actions' only."
)
value[CONF_ACTIONS] = value.pop(CONF_ACTION)
return value
value = cv.renamed(CONF_TRIGGER, CONF_TRIGGERS)(value)
value = cv.renamed(CONF_ACTION, CONF_ACTIONS)(value)
return cv.renamed(CONF_CONDITION, CONF_CONDITIONS)(value)
PLATFORM_SCHEMA = vol.All(
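The rewrite above collapses three hand-rolled rename branches into cv.renamed. As a sketch of the semantics that helper provides (a stand-in implementation, not Home Assistant's): move the legacy key to its new name and reject configs that set both.

import voluptuous as vol

def renamed(old_key: str, new_key: str):
    """Stand-in for cv.renamed: move old_key to new_key, reject duplicates."""

    def validator(config: dict) -> dict:
        if old_key in config:
            if new_key in config:
                raise vol.Invalid(
                    f"Cannot specify both '{old_key}' and '{new_key}'. "
                    f"Please use '{new_key}' only."
                )
            config[new_key] = config.pop(old_key)
        return config

    return validator

assert renamed("trigger", "triggers")({"trigger": []}) == {"triggers": []}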

View File

@@ -2,10 +2,6 @@
import logging
CONF_ACTION = "action"
CONF_ACTIONS = "actions"
CONF_TRIGGER = "trigger"
CONF_TRIGGERS = "triggers"
CONF_TRIGGER_VARIABLES = "trigger_variables"
DOMAIN = "automation"

View File

@@ -6,5 +6,5 @@
"iot_class": "cloud_push",
"loggers": ["aiobotocore", "botocore"],
"quality_scale": "legacy",
"requirements": ["aiobotocore==2.13.1", "botocore==1.34.131"]
"requirements": ["aiobotocore==2.21.1", "botocore==1.37.1"]
}

View File

@@ -0,0 +1,82 @@
"""The AWS S3 integration."""
from __future__ import annotations
import logging
from typing import cast
from aiobotocore.client import AioBaseClient as S3Client
from aiobotocore.session import AioSession
from botocore.exceptions import ClientError, ConnectionError, ParamValidationError
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from .const import (
CONF_ACCESS_KEY_ID,
CONF_BUCKET,
CONF_ENDPOINT_URL,
CONF_SECRET_ACCESS_KEY,
DATA_BACKUP_AGENT_LISTENERS,
DOMAIN,
)
type S3ConfigEntry = ConfigEntry[S3Client]
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass: HomeAssistant, entry: S3ConfigEntry) -> bool:
"""Set up S3 from a config entry."""
data = cast(dict, entry.data)
try:
session = AioSession()
# pylint: disable-next=unnecessary-dunder-call
client = await session.create_client(
"s3",
endpoint_url=data.get(CONF_ENDPOINT_URL),
aws_secret_access_key=data[CONF_SECRET_ACCESS_KEY],
aws_access_key_id=data[CONF_ACCESS_KEY_ID],
).__aenter__()
await client.head_bucket(Bucket=data[CONF_BUCKET])
except ClientError as err:
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="invalid_credentials",
) from err
except ParamValidationError as err:
if "Invalid bucket name" in str(err):
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="invalid_bucket_name",
) from err
except ValueError as err:
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="invalid_endpoint_url",
) from err
except ConnectionError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
) from err
entry.runtime_data = client
def notify_backup_listeners() -> None:
for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
listener()
entry.async_on_unload(entry.async_on_state_change(notify_backup_listeners))
return True
async def async_unload_entry(hass: HomeAssistant, entry: S3ConfigEntry) -> bool:
"""Unload a config entry."""
client = entry.runtime_data
await client.__aexit__(None, None, None)
return True
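Because the S3 client has to outlive async_setup_entry, the code enters the client's async context manually and exits it in async_unload_entry (hence the pylint suppression on the dunder call). The same lifetime pattern in isolation, with generic stand-in names:

import asyncio

class Resource:
    """Stand-in for an object normally used via `async with`."""

    async def __aenter__(self) -> "Resource":
        self.open = True
        return self

    async def __aexit__(self, *exc_info: object) -> None:
        self.open = False

async def main() -> None:
    state: dict[str, Resource] = {}
    # Setup: enter manually so the resource stays open after setup returns.
    state["client"] = await Resource().__aenter__()
    assert state["client"].open
    # Unload: exit manually, mirroring async_unload_entry above.
    await state["client"].__aexit__(None, None, None)
    assert not state["client"].open

asyncio.run(main())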

View File

@@ -0,0 +1,330 @@
"""Backup platform for the AWS S3 integration."""
from collections.abc import AsyncIterator, Callable, Coroutine
import functools
import json
import logging
from time import time
from typing import Any
from botocore.exceptions import BotoCoreError
from homeassistant.components.backup import (
AgentBackup,
BackupAgent,
BackupAgentError,
BackupNotFound,
suggested_filename,
)
from homeassistant.core import HomeAssistant, callback
from . import S3ConfigEntry
from .const import CONF_BUCKET, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
_LOGGER = logging.getLogger(__name__)
CACHE_TTL = 300
# S3 part size requirements: 5 MiB to 5 GiB per part
# https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html
# We set the threshold to 20 MiB to avoid too many parts.
# Note that each part is held in memory.
MULTIPART_MIN_PART_SIZE_BYTES = 20 * 2**20
def handle_boto_errors[T](
func: Callable[..., Coroutine[Any, Any, T]],
) -> Callable[..., Coroutine[Any, Any, T]]:
"""Handle BotoCoreError exceptions by converting them to BackupAgentError."""
@functools.wraps(func)
async def wrapper(*args: Any, **kwargs: Any) -> T:
"""Catch BotoCoreError and raise BackupAgentError."""
try:
return await func(*args, **kwargs)
except BotoCoreError as err:
error_msg = f"Failed during {func.__name__}"
raise BackupAgentError(error_msg) from err
return wrapper
async def async_get_backup_agents(
hass: HomeAssistant,
) -> list[BackupAgent]:
"""Return a list of backup agents."""
entries: list[S3ConfigEntry] = hass.config_entries.async_loaded_entries(DOMAIN)
return [S3BackupAgent(hass, entry) for entry in entries]
@callback
def async_register_backup_agents_listener(
hass: HomeAssistant,
*,
listener: Callable[[], None],
**kwargs: Any,
) -> Callable[[], None]:
"""Register a listener to be called when agents are added or removed.
:return: A function to unregister the listener.
"""
hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener)
@callback
def remove_listener() -> None:
"""Remove the listener."""
hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener)
if not hass.data[DATA_BACKUP_AGENT_LISTENERS]:
del hass.data[DATA_BACKUP_AGENT_LISTENERS]
return remove_listener
def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
"""Return the suggested filenames for the backup and metadata files."""
base_name = suggested_filename(backup).rsplit(".", 1)[0]
return f"{base_name}.tar", f"{base_name}.metadata.json"
class S3BackupAgent(BackupAgent):
"""Backup agent for the S3 integration."""
domain = DOMAIN
def __init__(self, hass: HomeAssistant, entry: S3ConfigEntry) -> None:
"""Initialize the S3 agent."""
super().__init__()
self._client = entry.runtime_data
self._bucket: str = entry.data[CONF_BUCKET]
self.name = entry.title
self.unique_id = entry.entry_id
self._backup_cache: dict[str, AgentBackup] = {}
self._cache_expiration = time()
@handle_boto_errors
async def async_download_backup(
self,
backup_id: str,
**kwargs: Any,
) -> AsyncIterator[bytes]:
"""Download a backup file.
:param backup_id: The ID of the backup that was returned in async_list_backups.
:return: An async iterator that yields bytes.
"""
backup = await self._find_backup_by_id(backup_id)
tar_filename, _ = suggested_filenames(backup)
response = await self._client.get_object(Bucket=self._bucket, Key=tar_filename)
return response["Body"].iter_chunks()
async def async_upload_backup(
self,
*,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
backup: AgentBackup,
**kwargs: Any,
) -> None:
"""Upload a backup.
:param open_stream: A function returning an async iterator that yields bytes.
:param backup: Metadata about the backup that should be uploaded.
"""
tar_filename, metadata_filename = suggested_filenames(backup)
try:
if backup.size < MULTIPART_MIN_PART_SIZE_BYTES:
await self._upload_simple(tar_filename, open_stream)
else:
await self._upload_multipart(tar_filename, open_stream)
# Upload the metadata file
metadata_content = json.dumps(backup.as_dict())
await self._client.put_object(
Bucket=self._bucket,
Key=metadata_filename,
Body=metadata_content,
)
except BotoCoreError as err:
raise BackupAgentError("Failed to upload backup") from err
else:
# Reset cache after successful upload
self._cache_expiration = time()
async def _upload_simple(
self,
tar_filename: str,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
) -> None:
"""Upload a small file using simple upload.
:param tar_filename: The target filename for the backup.
:param open_stream: A function returning an async iterator that yields bytes.
"""
_LOGGER.debug("Starting simple upload for %s", tar_filename)
stream = await open_stream()
file_data = bytearray()
async for chunk in stream:
file_data.extend(chunk)
await self._client.put_object(
Bucket=self._bucket,
Key=tar_filename,
Body=bytes(file_data),
)
async def _upload_multipart(
self,
tar_filename: str,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
) -> None:
"""Upload a large file using multipart upload.
:param tar_filename: The target filename for the backup.
:param open_stream: A function returning an async iterator that yields bytes.
"""
_LOGGER.debug("Starting multipart upload for %s", tar_filename)
multipart_upload = await self._client.create_multipart_upload(
Bucket=self._bucket,
Key=tar_filename,
)
upload_id = multipart_upload["UploadId"]
try:
parts = []
part_number = 1
buffer_size = 0 # bytes
buffer: list[bytes] = []
stream = await open_stream()
async for chunk in stream:
buffer_size += len(chunk)
buffer.append(chunk)
# If buffer size meets minimum part size, upload it as a part
if buffer_size >= MULTIPART_MIN_PART_SIZE_BYTES:
_LOGGER.debug(
"Uploading part number %d, size %d", part_number, buffer_size
)
part = await self._client.upload_part(
Bucket=self._bucket,
Key=tar_filename,
PartNumber=part_number,
UploadId=upload_id,
Body=b"".join(buffer),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
part_number += 1
buffer_size = 0
buffer = []
# Upload the final buffer as the last part (no minimum size requirement)
if buffer:
_LOGGER.debug(
"Uploading final part number %d, size %d", part_number, buffer_size
)
part = await self._client.upload_part(
Bucket=self._bucket,
Key=tar_filename,
PartNumber=part_number,
UploadId=upload_id,
Body=b"".join(buffer),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
await self._client.complete_multipart_upload(
Bucket=self._bucket,
Key=tar_filename,
UploadId=upload_id,
MultipartUpload={"Parts": parts},
)
except BotoCoreError:
try:
await self._client.abort_multipart_upload(
Bucket=self._bucket,
Key=tar_filename,
UploadId=upload_id,
)
except BotoCoreError:
_LOGGER.exception("Failed to abort multipart upload")
raise
@handle_boto_errors
async def async_delete_backup(
self,
backup_id: str,
**kwargs: Any,
) -> None:
"""Delete a backup file.
:param backup_id: The ID of the backup that was returned in async_list_backups.
"""
backup = await self._find_backup_by_id(backup_id)
tar_filename, metadata_filename = suggested_filenames(backup)
# Delete both the backup file and its metadata file
await self._client.delete_object(Bucket=self._bucket, Key=tar_filename)
await self._client.delete_object(Bucket=self._bucket, Key=metadata_filename)
# Reset cache after successful deletion
self._cache_expiration = time()
@handle_boto_errors
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
backups = await self._list_backups()
return list(backups.values())
@handle_boto_errors
async def async_get_backup(
self,
backup_id: str,
**kwargs: Any,
) -> AgentBackup:
"""Return a backup."""
return await self._find_backup_by_id(backup_id)
async def _find_backup_by_id(self, backup_id: str) -> AgentBackup:
"""Find a backup by its backup ID."""
backups = await self._list_backups()
if backup := backups.get(backup_id):
return backup
raise BackupNotFound(f"Backup {backup_id} not found")
async def _list_backups(self) -> dict[str, AgentBackup]:
"""List backups, using a cache if possible."""
if time() <= self._cache_expiration:
return self._backup_cache
backups = {}
response = await self._client.list_objects_v2(Bucket=self._bucket)
# Filter for metadata files only
metadata_files = [
obj
for obj in response.get("Contents", [])
if obj["Key"].endswith(".metadata.json")
]
for metadata_file in metadata_files:
try:
# Download and parse metadata file
metadata_response = await self._client.get_object(
Bucket=self._bucket, Key=metadata_file["Key"]
)
metadata_content = await metadata_response["Body"].read()
metadata_json = json.loads(metadata_content)
except (BotoCoreError, json.JSONDecodeError) as err:
_LOGGER.warning(
"Failed to process metadata file %s: %s",
metadata_file["Key"],
err,
)
continue
backup = AgentBackup.from_dict(metadata_json)
backups[backup.backup_id] = backup
self._backup_cache = backups
self._cache_expiration = time() + CACHE_TTL
return self._backup_cache
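The multipart path buffers incoming chunks until they reach the 20 MiB minimum, uploads each full buffer as one part, and flushes whatever remains as a final, smaller part. The buffering logic isolated as a hypothetical pure generator (not a helper from the commit):

from collections.abc import Iterable, Iterator

MIN_PART_SIZE = 20 * 2**20  # 20 MiB, matching MULTIPART_MIN_PART_SIZE_BYTES

def iter_parts(chunks: Iterable[bytes], min_size: int = MIN_PART_SIZE) -> Iterator[bytes]:
    """Yield joined buffers of at least min_size; the last part may be smaller."""
    buffer: list[bytes] = []
    size = 0
    for chunk in chunks:
        buffer.append(chunk)
        size += len(chunk)
        if size >= min_size:
            yield b"".join(buffer)
            buffer, size = [], 0
    if buffer:  # final part: S3 allows it to be under the minimum
        yield b"".join(buffer)

# Example: 50 MiB of data split into 20 MiB, 20 MiB and 10 MiB parts.
parts = list(iter_parts([b"x" * 2**20] * 50))
assert [len(p) for p in parts] == [20 * 2**20, 20 * 2**20, 10 * 2**20]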

View File

@@ -0,0 +1,101 @@
"""Config flow for the AWS S3 integration."""
from __future__ import annotations
from typing import Any
from urllib.parse import urlparse
from aiobotocore.session import AioSession
from botocore.exceptions import ClientError, ConnectionError, ParamValidationError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from .const import (
AWS_DOMAIN,
CONF_ACCESS_KEY_ID,
CONF_BUCKET,
CONF_ENDPOINT_URL,
CONF_SECRET_ACCESS_KEY,
DEFAULT_ENDPOINT_URL,
DESCRIPTION_AWS_S3_DOCS_URL,
DESCRIPTION_BOTO3_DOCS_URL,
DOMAIN,
)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_ACCESS_KEY_ID): cv.string,
vol.Required(CONF_SECRET_ACCESS_KEY): TextSelector(
config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
),
vol.Required(CONF_BUCKET): cv.string,
vol.Required(CONF_ENDPOINT_URL, default=DEFAULT_ENDPOINT_URL): TextSelector(
config=TextSelectorConfig(type=TextSelectorType.URL)
),
}
)
class S3ConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a flow initiated by the user."""
errors: dict[str, str] = {}
if user_input is not None:
self._async_abort_entries_match(
{
CONF_BUCKET: user_input[CONF_BUCKET],
CONF_ENDPOINT_URL: user_input[CONF_ENDPOINT_URL],
}
)
if not urlparse(user_input[CONF_ENDPOINT_URL]).hostname.endswith(
AWS_DOMAIN
):
errors[CONF_ENDPOINT_URL] = "invalid_endpoint_url"
else:
try:
session = AioSession()
async with session.create_client(
"s3",
endpoint_url=user_input.get(CONF_ENDPOINT_URL),
aws_secret_access_key=user_input[CONF_SECRET_ACCESS_KEY],
aws_access_key_id=user_input[CONF_ACCESS_KEY_ID],
) as client:
await client.head_bucket(Bucket=user_input[CONF_BUCKET])
except ClientError:
errors["base"] = "invalid_credentials"
except ParamValidationError as err:
if "Invalid bucket name" in str(err):
errors[CONF_BUCKET] = "invalid_bucket_name"
except ValueError:
errors[CONF_ENDPOINT_URL] = "invalid_endpoint_url"
except ConnectionError:
errors[CONF_ENDPOINT_URL] = "cannot_connect"
else:
return self.async_create_entry(
title=user_input[CONF_BUCKET], data=user_input
)
return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
STEP_USER_DATA_SCHEMA, user_input
),
errors=errors,
description_placeholders={
"aws_s3_docs_url": DESCRIPTION_AWS_S3_DOCS_URL,
"boto3_docs_url": DESCRIPTION_BOTO3_DOCS_URL,
},
)
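Endpoint validation relies on urlparse(...).hostname.endswith(AWS_DOMAIN). Worth noting: a bare suffix check also matches look-alike hosts such as s3.evil-amazonaws.com. A stricter variant, sketched here rather than taken from the commit, anchors the suffix at a label boundary:

from urllib.parse import urlparse

AWS_DOMAIN = "amazonaws.com"

def is_aws_endpoint(url: str) -> bool:
    """Accept amazonaws.com itself or any subdomain, but not look-alike hosts."""
    hostname = urlparse(url).hostname or ""
    return hostname == AWS_DOMAIN or hostname.endswith("." + AWS_DOMAIN)

assert is_aws_endpoint("https://s3.eu-central-1.amazonaws.com/")
assert not is_aws_endpoint("https://s3.evil-amazonaws.com/")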

View File

@@ -0,0 +1,23 @@
"""Constants for the AWS S3 integration."""
from collections.abc import Callable
from typing import Final
from homeassistant.util.hass_dict import HassKey
DOMAIN: Final = "aws_s3"
CONF_ACCESS_KEY_ID = "access_key_id"
CONF_SECRET_ACCESS_KEY = "secret_access_key"
CONF_ENDPOINT_URL = "endpoint_url"
CONF_BUCKET = "bucket"
AWS_DOMAIN = "amazonaws.com"
DEFAULT_ENDPOINT_URL = f"https://s3.eu-central-1.{AWS_DOMAIN}/"
DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
f"{DOMAIN}.backup_agent_listeners"
)
DESCRIPTION_AWS_S3_DOCS_URL = "https://docs.aws.amazon.com/general/latest/gr/s3.html"
DESCRIPTION_BOTO3_DOCS_URL = "https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html"

View File

@@ -0,0 +1,12 @@
{
"domain": "aws_s3",
"name": "AWS S3",
"codeowners": ["@tomasbedrich"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aws_s3",
"integration_type": "service",
"iot_class": "cloud_push",
"loggers": ["aiobotocore"],
"quality_scale": "bronze",
"requirements": ["aiobotocore==2.21.1"]
}

View File

@@ -0,0 +1,112 @@
rules:
# Bronze
action-setup:
status: exempt
comment: Integration does not register custom actions.
appropriate-polling:
status: exempt
comment: This integration does not poll.
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: This integration does not have any custom actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: Entities of this integration do not explicitly subscribe to events.
entity-unique-id:
status: exempt
comment: This integration does not have entities.
has-entity-name:
status: exempt
comment: This integration does not have entities.
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: Integration does not register custom actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: This integration does not have an options flow.
docs-installation-parameters: done
entity-unavailable:
status: exempt
comment: This integration does not have entities.
integration-owner: done
log-when-unavailable: todo
parallel-updates:
status: exempt
comment: This integration does not poll.
reauthentication-flow: todo
test-coverage: done
# Gold
devices:
status: exempt
comment: This integration does not have entities.
diagnostics: todo
discovery-update-info:
status: exempt
comment: S3 is a cloud service that is not discovered on the network.
discovery:
status: exempt
comment: S3 is a cloud service that is not discovered on the network.
docs-data-update:
status: exempt
comment: This integration does not poll.
docs-examples:
status: exempt
comment: The integration extends core functionality and does not require examples.
docs-known-limitations:
status: exempt
comment: No known limitations.
docs-supported-devices:
status: exempt
comment: This integration does not support physical devices.
docs-supported-functions: done
docs-troubleshooting:
status: exempt
comment: There are no more detailed troubleshooting instructions available than what is already included in strings.json.
docs-use-cases: done
dynamic-devices:
status: exempt
comment: This integration does not have devices.
entity-category:
status: exempt
comment: This integration does not have entities.
entity-device-class:
status: exempt
comment: This integration does not have entities.
entity-disabled-by-default:
status: exempt
comment: This integration does not have entities.
entity-translations:
status: exempt
comment: This integration does not have entities.
exception-translations: done
icon-translations:
status: exempt
comment: This integration does not use icons.
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: There are no issues which can be repaired.
stale-devices:
status: exempt
comment: This integration does not have devices.
# Platinum
async-dependency: done
inject-websession: todo
strict-typing: todo

View File

@@ -0,0 +1,41 @@
{
"config": {
"step": {
"user": {
"data": {
"access_key_id": "Access key ID",
"secret_access_key": "Secret access key",
"bucket": "Bucket name",
"endpoint_url": "Endpoint URL"
},
"data_description": {
"access_key_id": "Access key ID to connect to AWS S3 API",
"secret_access_key": "Secret access key to connect to AWS S3 API",
"bucket": "Bucket must already exist and be writable by the provided credentials.",
"endpoint_url": "Endpoint URL provided to [Boto3 Session]({boto3_docs_url}). Region-specific [AWS S3 endpoints]({aws_s3_docs_url}) are available in their docs."
},
"title": "Add AWS S3 bucket"
}
},
"error": {
"cannot_connect": "[%key:component::aws_s3::exceptions::cannot_connect::message%]",
"invalid_bucket_name": "[%key:component::aws_s3::exceptions::invalid_bucket_name::message%]",
"invalid_credentials": "[%key:component::aws_s3::exceptions::invalid_credentials::message%]",
"invalid_endpoint_url": "Invalid endpoint URL. Please make sure it's a valid AWS S3 endpoint URL."
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"exceptions": {
"cannot_connect": {
"message": "Cannot connect to endpoint"
},
"invalid_bucket_name": {
"message": "Invalid bucket name"
},
"invalid_credentials": {
"message": "Bucket cannot be accessed using provided combination of access key ID and secret access key."
}
}
}

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from collections.abc import Mapping
from ipaddress import ip_address
from types import MappingProxyType
from typing import Any
from urllib.parse import urlsplit
@@ -88,7 +87,7 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
if user_input is not None:
try:
api = await get_axis_api(self.hass, MappingProxyType(user_input))
api = await get_axis_api(self.hass, user_input)
except AuthenticationRequired:
errors["base"] = "invalid_auth"

View File

@@ -1,7 +1,7 @@
"""Axis network device abstraction."""
from asyncio import timeout
from types import MappingProxyType
from collections.abc import Mapping
from typing import Any
import axis
@@ -23,7 +23,7 @@ from ..errors import AuthenticationRequired, CannotConnect
async def get_axis_api(
hass: HomeAssistant,
config: MappingProxyType[str, Any],
config: Mapping[str, Any],
) -> axis.AxisDevice:
"""Create a Axis device API."""
session = get_async_client(hass, verify_ssl=False)

View File

@@ -3,11 +3,10 @@
from __future__ import annotations
import asyncio
from collections.abc import Callable
from collections.abc import Callable, Mapping
from datetime import datetime
import json
import logging
from types import MappingProxyType
from typing import Any
from azure.eventhub import EventData, EventDataBatch
@@ -179,7 +178,7 @@ class AzureEventHub:
await self.async_send(None)
await self._queue.join()
def update_options(self, new_options: MappingProxyType[str, Any]) -> None:
def update_options(self, new_options: Mapping[str, Any]) -> None:
"""Update options."""
self._send_interval = new_options[CONF_SEND_INTERVAL]
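This hunk, like the asuswrt and axis ones above, widens MappingProxyType[str, Any] parameters to collections.abc.Mapping[str, Any]. Mapping is the read-only protocol that both dict and MappingProxyType satisfy, so callers can pass either without wrapping. For illustration:

from collections.abc import Mapping
from types import MappingProxyType
from typing import Any

def update_options(new_options: Mapping[str, Any]) -> list[str]:
    """Accept any read-only mapping; mutation methods are not part of Mapping."""
    return sorted(new_options)

options = {"send_interval": 5}
update_options(options)                    # plain dict: OK
update_options(MappingProxyType(options))  # read-only proxy: also OK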

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
from collections import defaultdict
from dataclasses import dataclass, field, replace
import datetime as dt
from datetime import datetime, timedelta
@@ -87,12 +88,26 @@ class BackupConfigData:
else:
time = None
days = [Day(day) for day in data["schedule"]["days"]]
agents = {}
for agent_id, agent_data in data["agents"].items():
protected = agent_data["protected"]
stored_retention = agent_data["retention"]
agent_retention: AgentRetentionConfig | None
if stored_retention:
agent_retention = AgentRetentionConfig(
copies=stored_retention["copies"],
days=stored_retention["days"],
)
else:
agent_retention = None
agent_config = AgentConfig(
protected=protected,
retention=agent_retention,
)
agents[agent_id] = agent_config
return cls(
agents={
agent_id: AgentConfig(protected=agent_data["protected"])
for agent_id, agent_data in data["agents"].items()
},
agents=agents,
automatic_backups_configured=data["automatic_backups_configured"],
create_backup=CreateBackupConfig(
agent_ids=data["create_backup"]["agent_ids"],
@@ -176,12 +191,36 @@ class BackupConfig:
"""Update config."""
if agents is not UNDEFINED:
for agent_id, agent_config in agents.items():
if agent_id not in self.data.agents:
self.data.agents[agent_id] = AgentConfig(**agent_config)
agent_retention = agent_config.get("retention")
if agent_retention is None:
new_agent_retention = None
else:
self.data.agents[agent_id] = replace(
self.data.agents[agent_id], **agent_config
new_agent_retention = AgentRetentionConfig(
copies=agent_retention.get("copies"),
days=agent_retention.get("days"),
)
if agent_id not in self.data.agents:
old_agent_retention = None
self.data.agents[agent_id] = AgentConfig(
protected=agent_config.get("protected", True),
retention=new_agent_retention,
)
else:
new_agent_config = self.data.agents[agent_id]
old_agent_retention = new_agent_config.retention
if "protected" in agent_config:
new_agent_config = replace(
new_agent_config, protected=agent_config["protected"]
)
if "retention" in agent_config:
new_agent_config = replace(
new_agent_config, retention=new_agent_retention
)
self.data.agents[agent_id] = new_agent_config
if new_agent_retention != old_agent_retention:
# There's a single retention application method
# for both global and agent retention settings.
self.data.retention.apply(self._manager)
if automatic_backups_configured is not UNDEFINED:
self.data.automatic_backups_configured = automatic_backups_configured
if create_backup is not UNDEFINED:
@@ -207,11 +246,24 @@ class AgentConfig:
"""Represent the config for an agent."""
protected: bool
"""Agent protected configuration.
If True, the agent backups are password protected.
"""
retention: AgentRetentionConfig | None = None
"""Agent retention configuration.
If None, the global retention configuration is used.
If not None, the global retention configuration is ignored for this agent.
If an agent retention configuration is set and both copies and days are None,
backups will be kept forever for that agent.
"""
def to_dict(self) -> StoredAgentConfig:
"""Convert agent config to a dict."""
return {
"protected": self.protected,
"retention": self.retention.to_dict() if self.retention else None,
}
@@ -219,24 +271,46 @@ class StoredAgentConfig(TypedDict):
"""Represent the stored config for an agent."""
protected: bool
retention: StoredRetentionConfig | None
class AgentParametersDict(TypedDict, total=False):
"""Represent the parameters for an agent."""
protected: bool
retention: RetentionParametersDict | None
@dataclass(kw_only=True)
class RetentionConfig:
"""Represent the backup retention configuration."""
class BaseRetentionConfig:
"""Represent the base backup retention configuration."""
copies: int | None = None
days: int | None = None
def to_dict(self) -> StoredRetentionConfig:
"""Convert backup retention configuration to a dict."""
return StoredRetentionConfig(
copies=self.copies,
days=self.days,
)
@dataclass(kw_only=True)
class RetentionConfig(BaseRetentionConfig):
"""Represent the backup retention configuration."""
def apply(self, manager: BackupManager) -> None:
"""Apply backup retention configuration."""
if self.days is not None:
agents_retention = {
agent_id: agent_config.retention
for agent_id, agent_config in manager.config.data.agents.items()
}
if self.days is not None or any(
agent_retention and agent_retention.days is not None
for agent_retention in agents_retention.values()
):
LOGGER.debug(
"Scheduling next automatic delete of backups older than %s in 1 day",
self.days,
@@ -246,13 +320,6 @@ class RetentionConfig:
LOGGER.debug("Unscheduling next automatic delete")
self._unschedule_next(manager)
def to_dict(self) -> StoredRetentionConfig:
"""Convert backup retention configuration to a dict."""
return StoredRetentionConfig(
copies=self.copies,
days=self.days,
)
@callback
def _schedule_next(
self,
@@ -271,17 +338,82 @@ class RetentionConfig:
"""Return backups older than days to delete."""
# we need to check here since we await before
# this filter is applied
if self.days is None:
agents_retention = {
agent_id: agent_config.retention
for agent_id, agent_config in manager.config.data.agents.items()
}
has_agents_retention = any(
agent_retention for agent_retention in agents_retention.values()
)
has_agents_retention_days = any(
agent_retention and agent_retention.days is not None
for agent_retention in agents_retention.values()
)
if (global_days := self.days) is None and not has_agents_retention_days:
# No global retention days and no agent retention days
return {}
now = dt_util.utcnow()
if global_days is not None and not has_agents_retention:
# Return early to avoid the longer filtering below.
return {
backup_id: backup
for backup_id, backup in backups.items()
if dt_util.parse_datetime(backup.date, raise_on_error=True)
+ timedelta(days=self.days)
+ timedelta(days=global_days)
< now
}
# If there are any agent retention settings, we need to check
# the retention settings for every backup and agent combination.
backups_to_delete = {}
for backup_id, backup in backups.items():
backup_date = dt_util.parse_datetime(
backup.date, raise_on_error=True
)
delete_from_agents = set(backup.agents)
for agent_id in backup.agents:
agent_retention = agents_retention.get(agent_id)
if agent_retention is None:
# This agent does not have a retention setting,
# so the global retention setting should be used.
if global_days is None:
# This agent does not have a retention setting
# and the global retention days setting is None,
# so this backup should not be deleted.
delete_from_agents.discard(agent_id)
continue
days = global_days
elif (agent_days := agent_retention.days) is None:
# This agent has a retention setting
# where days is set to None,
# so the backup should not be deleted.
delete_from_agents.discard(agent_id)
continue
else:
# This agent has a retention setting
# where days is set to a number,
# so that setting should be used.
days = agent_days
if backup_date + timedelta(days=days) >= now:
# This backup is not older than the retention days,
# so it should not be deleted from this agent.
delete_from_agents.discard(agent_id)
filtered_backup = replace(
backup,
agents={
agent_id: agent_backup_status
for agent_id, agent_backup_status in backup.agents.items()
if agent_id in delete_from_agents
},
)
backups_to_delete[backup_id] = filtered_backup
return backups_to_delete
await manager.async_delete_filtered_backups(
include_filter=_automatic_backups_filter, delete_filter=_delete_filter
)
@@ -312,6 +444,10 @@ class RetentionParametersDict(TypedDict, total=False):
days: int | None
class AgentRetentionConfig(BaseRetentionConfig):
"""Represent an agent retention configuration."""
class StoredBackupSchedule(TypedDict):
"""Represent the stored backup schedule configuration."""
@@ -554,17 +690,88 @@ async def delete_backups_exceeding_configured_count(manager: BackupManager) -> N
backups: dict[str, ManagerBackup],
) -> dict[str, ManagerBackup]:
"""Return oldest backups more numerous than copies to delete."""
agents_retention = {
agent_id: agent_config.retention
for agent_id, agent_config in manager.config.data.agents.items()
}
has_agents_retention = any(
agent_retention for agent_retention in agents_retention.values()
)
has_agents_retention_copies = any(
agent_retention and agent_retention.copies is not None
for agent_retention in agents_retention.values()
)
# we need to check here since we await before
# this filter is applied
if manager.config.data.retention.copies is None:
if (
global_copies := manager.config.data.retention.copies
) is None and not has_agents_retention_copies:
# No global retention copies and no agent retention copies
return {}
if global_copies is not None and not has_agents_retention:
# Return early to avoid the longer filtering below.
return dict(
sorted(
backups.items(),
key=lambda backup_item: backup_item[1].date,
)[: max(len(backups) - manager.config.data.retention.copies, 0)]
)[: max(len(backups) - global_copies, 0)]
)
backups_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(dict)
for backup_id, backup in backups.items():
for agent_id in backup.agents:
backups_by_agent[agent_id][backup_id] = backup
backups_to_delete_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(
dict
)
for agent_id, agent_backups in backups_by_agent.items():
agent_retention = agents_retention.get(agent_id)
if agent_retention is None:
# This agent does not have a retention setting,
# so the global retention setting should be used.
if global_copies is None:
# This agent does not have a retention setting
# and the global retention copies setting is None,
# so backups should not be deleted.
continue
# The global retention setting will be used.
copies = global_copies
elif (agent_copies := agent_retention.copies) is None:
# This agent has a retention setting
# where copies is set to None,
# so backups should not be deleted.
continue
else:
# This agent's retention setting will be used.
copies = agent_copies
backups_to_delete_by_agent[agent_id] = dict(
sorted(
agent_backups.items(),
key=lambda backup_item: backup_item[1].date,
)[: max(len(agent_backups) - copies, 0)]
)
backup_ids_to_delete: dict[str, set[str]] = defaultdict(set)
for agent_id, to_delete in backups_to_delete_by_agent.items():
for backup_id in to_delete:
backup_ids_to_delete[backup_id].add(agent_id)
backups_to_delete: dict[str, ManagerBackup] = {}
for backup_id, agent_ids in backup_ids_to_delete.items():
backup = backups[backup_id]
# filter the backup to only include the agents that should be deleted
filtered_backup = replace(
backup,
agents={
agent_id: agent_backup_status
for agent_id, agent_backup_status in backup.agents.items()
if agent_id in agent_ids
},
)
backups_to_delete[backup_id] = filtered_backup
return backups_to_delete
await manager.async_delete_filtered_backups(
include_filter=_automatic_backups_filter, delete_filter=_delete_filter
)
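Both filters above resolve an effective retention value per agent with the same precedence: an agent override wins when present (and None inside an override means "keep forever"), otherwise the global setting applies. That precedence, condensed into a sketch:

def effective_days(
    global_days: int | None,
    agent_retention: dict[str, int | None] | None,
) -> int | None:
    """Return the retention days to apply for one agent, or None for 'keep'."""
    if agent_retention is None:      # no override: fall back to global
        return global_days
    return agent_retention["days"]   # override wins, even when it is None

assert effective_days(7, None) == 7                # global applies
assert effective_days(7, {"days": None}) is None   # override: keep forever
assert effective_days(None, {"days": 3}) == 3      # override: delete after 3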

View File

@@ -0,0 +1,136 @@
"""Backup onboarding views."""
from __future__ import annotations
from collections.abc import Callable, Coroutine
from functools import wraps
from http import HTTPStatus
from typing import TYPE_CHECKING, Any, Concatenate
from aiohttp import web
from aiohttp.web_exceptions import HTTPUnauthorized
import voluptuous as vol
from homeassistant.components.http import KEY_HASS
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.components.onboarding import (
BaseOnboardingView,
NoAuthBaseOnboardingView,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.backup import async_get_manager as async_get_backup_manager
from . import BackupManager, Folder, IncorrectPasswordError, http as backup_http
if TYPE_CHECKING:
from homeassistant.components.onboarding import OnboardingStoreData
async def async_setup_views(hass: HomeAssistant, data: OnboardingStoreData) -> None:
"""Set up the backup views."""
hass.http.register_view(BackupInfoView(data))
hass.http.register_view(RestoreBackupView(data))
hass.http.register_view(UploadBackupView(data))
def with_backup_manager[_ViewT: BaseOnboardingView, **_P](
func: Callable[
Concatenate[_ViewT, BackupManager, web.Request, _P],
Coroutine[Any, Any, web.Response],
],
) -> Callable[Concatenate[_ViewT, web.Request, _P], Coroutine[Any, Any, web.Response]]:
"""Home Assistant API decorator to check onboarding and inject manager."""
@wraps(func)
async def with_backup(
self: _ViewT,
request: web.Request,
*args: _P.args,
**kwargs: _P.kwargs,
) -> web.Response:
"""Check admin and call function."""
if self._data["done"]:
raise HTTPUnauthorized
manager = await async_get_backup_manager(request.app[KEY_HASS])
return await func(self, manager, request, *args, **kwargs)
return with_backup
class BackupInfoView(NoAuthBaseOnboardingView):
"""Get backup info view."""
url = "/api/onboarding/backup/info"
name = "api:onboarding:backup:info"
@with_backup_manager
async def get(self, manager: BackupManager, request: web.Request) -> web.Response:
"""Return backup info."""
backups, _ = await manager.async_get_backups()
return self.json(
{
"backups": list(backups.values()),
"state": manager.state,
"last_action_event": manager.last_action_event,
}
)
class RestoreBackupView(NoAuthBaseOnboardingView):
"""Restore backup view."""
url = "/api/onboarding/backup/restore"
name = "api:onboarding:backup:restore"
@RequestDataValidator(
vol.Schema(
{
vol.Required("backup_id"): str,
vol.Required("agent_id"): str,
vol.Optional("password"): str,
vol.Optional("restore_addons"): [str],
vol.Optional("restore_database", default=True): bool,
vol.Optional("restore_folders"): [vol.Coerce(Folder)],
}
)
)
@with_backup_manager
async def post(
self, manager: BackupManager, request: web.Request, data: dict[str, Any]
) -> web.Response:
"""Restore a backup."""
try:
await manager.async_restore_backup(
data["backup_id"],
agent_id=data["agent_id"],
password=data.get("password"),
restore_addons=data.get("restore_addons"),
restore_database=data["restore_database"],
restore_folders=data.get("restore_folders"),
restore_homeassistant=True,
)
except IncorrectPasswordError:
return self.json(
{"code": "incorrect_password"}, status_code=HTTPStatus.BAD_REQUEST
)
except HomeAssistantError as err:
return self.json(
{"code": "restore_failed", "message": str(err)},
status_code=HTTPStatus.BAD_REQUEST,
)
return web.Response(status=HTTPStatus.OK)
class UploadBackupView(NoAuthBaseOnboardingView, backup_http.UploadBackupView):
"""Upload backup view."""
url = "/api/onboarding/backup/upload"
name = "api:onboarding:backup:upload"
@with_backup_manager
async def post(self, manager: BackupManager, request: web.Request) -> web.Response:
"""Upload a backup file."""
return await self._post(request)
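with_backup_manager uses Concatenate so the decorator can inject the manager as a leading argument while the wrapped handler keeps a precise signature. The same pattern in miniature, with generic stand-in names:

import asyncio
from collections.abc import Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate

def with_resource[T, **P](
    func: Callable[Concatenate[str, P], Coroutine[Any, Any, T]],
) -> Callable[P, Coroutine[Any, Any, T]]:
    """Inject a resource as the first argument of the wrapped coroutine."""

    @wraps(func)
    async def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
        resource = "acquired-resource"  # stand-in for the real lookup
        return await func(resource, *args, **kwargs)

    return wrapper

@with_resource
async def handler(resource: str, request_id: int) -> str:
    return f"{resource}:{request_id}"

assert asyncio.run(handler(1)) == "acquired-resource:1"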

View File

@@ -16,7 +16,7 @@ if TYPE_CHECKING:
STORE_DELAY_SAVE = 30
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 5
STORAGE_VERSION_MINOR = 6
class StoredBackupData(TypedDict):
@@ -72,6 +72,10 @@ class _BackupStore(Store[StoredBackupData]):
data["config"]["automatic_backups_configured"] = (
data["config"]["create_backup"]["password"] is not None
)
if old_minor_version < 6:
# Version 1.6 adds agent retention settings
for agent in data["config"]["agents"]:
data["config"]["agents"][agent]["retention"] = None
# Note: We allow reading data with major version 2.
# Reject if major version is higher than 2.
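The version 1.6 migration only backfills the new key; stored payloads change as in this before/after sketch:

# Before (minor version 5) -> after (minor version 6): each agent entry
# gains an explicit "retention": None, meaning "use the global settings".
data = {"config": {"agents": {"backup.local": {"protected": True}}}}

for agent in data["config"]["agents"]:
    data["config"]["agents"][agent]["retention"] = None

assert data["config"]["agents"]["backup.local"] == {
    "protected": True,
    "retention": None,
}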

View File

@@ -26,9 +26,9 @@
"entity": {
"sensor": {
"backup_manager_state": {
"name": "Backup Manager State",
"name": "Backup Manager state",
"state": {
"idle": "Idle",
"idle": "[%key:common::state::idle%]",
"create_backup": "Creating a backup",
"receive_backup": "Receiving a backup",
"restore_backup": "Restoring a backup"

View File

@@ -346,7 +346,28 @@ async def handle_config_info(
@websocket_api.websocket_command(
{
vol.Required("type"): "backup/config/update",
vol.Optional("agents"): vol.Schema({str: {"protected": bool}}),
vol.Optional("agents"): vol.Schema(
{
str: {
vol.Optional("protected"): bool,
vol.Optional("retention"): vol.Any(
vol.Schema(
{
# Note: We can't use cv.positive_int because it allows 0 even
# though 0 is not positive.
vol.Optional("copies"): vol.Any(
vol.All(int, vol.Range(min=1)), None
),
vol.Optional("days"): vol.Any(
vol.All(int, vol.Range(min=1)), None
),
},
),
None,
),
}
}
),
vol.Optional("automatic_backups_configured"): bool,
vol.Optional("create_backup"): vol.Schema(
{
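Per the inline note, cv.positive_int would accept 0, so the schema pairs vol.All(int, vol.Range(min=1)) with an explicit None to allow clearing the setting. Its behavior in isolation:

import voluptuous as vol

copies = vol.Any(vol.All(int, vol.Range(min=1)), None)

assert copies(3) == 3        # positive count accepted
assert copies(None) is None  # explicit None clears the setting
try:
    copies(0)                # zero rejected: at least one copy must be kept
except vol.Invalid:
    pass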

View File

@@ -31,7 +31,7 @@
"state_attributes": {
"preset_mode": {
"state": {
"auto": "[%key:component::climate::entity_component::_::state_attributes::fan_mode::state::auto%]"
"auto": "[%key:common::state::auto%]"
}
}
}

View File

@@ -0,0 +1 @@
"""Balay virtual integration."""

View File

@@ -0,0 +1,6 @@
{
"domain": "balay",
"name": "Balay",
"integration_type": "virtual",
"supported_by": "home_connect"
}

View File

@@ -103,8 +103,8 @@
"temperature_range": {
"name": "Temperature range",
"state": {
"low": "Low",
"high": "High"
"low": "[%key:common::state::low%]",
"high": "[%key:common::state::high%]"
}
}
},

View File

@@ -124,15 +124,15 @@
"battery": {
"name": "Battery",
"state": {
"off": "Normal",
"on": "Low"
"off": "[%key:common::state::normal%]",
"on": "[%key:common::state::low%]"
}
},
"battery_charging": {
"name": "Charging",
"state": {
"off": "Not charging",
"on": "Charging"
"on": "[%key:common::state::charging%]"
}
},
"carbon_monoxide": {
@@ -145,7 +145,7 @@
"cold": {
"name": "Cold",
"state": {
"off": "[%key:component::binary_sensor::entity_component::battery::state::off%]",
"off": "[%key:common::state::normal%]",
"on": "Cold"
}
},
@@ -180,7 +180,7 @@
"heat": {
"name": "Heat",
"state": {
"off": "[%key:component::binary_sensor::entity_component::battery::state::off%]",
"off": "[%key:common::state::normal%]",
"on": "Hot"
}
},

View File

@@ -30,18 +30,18 @@
"available": "Available",
"charging": "[%key:common::state::charging%]",
"unavailable": "Unavailable",
"error": "Error",
"error": "[%key:common::state::error%]",
"offline": "Offline"
}
},
"vehicle_status": {
"name": "Vehicle status",
"state": {
"standby": "Standby",
"standby": "[%key:common::state::standby%]",
"vehicle_detected": "Detected",
"ready": "Ready",
"no_power": "No power",
"vehicle_error": "Error"
"vehicle_error": "[%key:common::state::error%]"
}
},
"actual_v1": {

View File

@@ -12,5 +12,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/bluemaestro",
"iot_class": "local_push",
"requirements": ["bluemaestro-ble==0.2.3"]
"requirements": ["bluemaestro-ble==0.4.1"]
}

View File

@@ -6,7 +6,7 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/bluesound",
"iot_class": "local_polling",
"requirements": ["pyblu==2.0.0"],
"requirements": ["pyblu==2.0.1"],
"zeroconf": [
{
"type": "_musc._tcp.local."

View File

@@ -330,7 +330,12 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
if self._status.input_id is not None:
for input_ in self._inputs:
if input_.id == self._status.input_id:
# the input might not have an id => also try to match on the stream_url/url
# we have to use both because neither matches all the time
if (
input_.id == self._status.input_id
or input_.url == self._status.stream_url
):
return input_.text
for preset in self._presets:

View File

@@ -18,9 +18,9 @@
"bleak==0.22.3",
"bleak-retry-connector==3.9.0",
"bluetooth-adapters==0.21.4",
"bluetooth-auto-recovery==1.4.5",
"bluetooth-data-tools==1.26.5",
"bluetooth-auto-recovery==1.5.1",
"bluetooth-data-tools==1.28.1",
"dbus-fast==2.43.0",
"habluetooth==3.37.0"
"habluetooth==3.48.2"
]
}

View File

@@ -374,6 +374,27 @@ class PassiveBluetoothProcessorCoordinator[_DataT](BasePassiveBluetoothCoordinat
self.logger.exception("Unexpected error updating %s data", self.name)
return
self._process_update(update, was_available)
@callback
def async_set_updated_data(self, update: _DataT) -> None:
"""Manually update the processor with new data.
If the data comes in via a different method, like a
notification, this method can be used to update the
processor with the new data.
This is useful for devices that retrieve
some of their data via notifications.
"""
was_available = self._available
self._available = True
self._process_update(update, was_available)
def _process_update(
self, update: _DataT, was_available: bool | None = None
) -> None:
"""Process the update from the bluetooth device."""
if not self.last_update_success:
self.last_update_success = True
self.logger.info("Coordinator %s recovered", self.name)
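A typical consumer of the new hook, sketched with a stand-in class (the real coordinator needs Bluetooth plumbing that would obscure the point): a notification callback parses the payload and pushes it into the coordinator instead of waiting for the next advertisement.

class FakeCoordinator:
    """Stand-in exposing the same method name as the new hook above."""

    def __init__(self) -> None:
        self.last_update: dict | None = None

    def async_set_updated_data(self, update: dict) -> None:
        self.last_update = update

def parse_notification(raw: bytes) -> dict:
    # Device-specific parsing would go here; this stub just wraps the payload.
    return {"raw": raw}

def handle_notification(coordinator: FakeCoordinator, raw: bytes) -> None:
    # Push data that arrived outside the advertisement path into the coordinator.
    coordinator.async_set_updated_data(parse_notification(raw))

coordinator = FakeCoordinator()
handle_notification(coordinator, b"\x01\x02")
assert coordinator.last_update == {"raw": b"\x01\x02"}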

View File

@@ -2,7 +2,7 @@
from __future__ import annotations
from bluetooth_adapters import (
from habluetooth import (
DiscoveredDeviceAdvertisementData,
DiscoveredDeviceAdvertisementDataDict,
DiscoveryStorageType,

View File

@@ -6,7 +6,7 @@
"data": {
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]",
"region": "ConnectedDrive Region"
"region": "ConnectedDrive region"
},
"data_description": {
"username": "The email address of your MyBMW/MINI Connected account.",
@@ -113,10 +113,10 @@
},
"select": {
"ac_limit": {
"name": "AC Charging Limit"
"name": "AC charging limit"
},
"charging_mode": {
"name": "Charging Mode",
"name": "Charging mode",
"state": {
"immediate_charging": "Immediate charging",
"delayed_charging": "Delayed charging",
@@ -139,7 +139,7 @@
"state": {
"default": "Default",
"charging": "[%key:common::state::charging%]",
"error": "Error",
"error": "[%key:common::state::error%]",
"complete": "Complete",
"fully_charged": "Fully charged",
"finished_fully_charged": "Finished, fully charged",
@@ -181,7 +181,7 @@
"cooling": "Cooling",
"heating": "Heating",
"inactive": "Inactive",
"standby": "Standby",
"standby": "[%key:common::state::standby%]",
"ventilation": "Ventilation"
}
},

View File

@@ -16,6 +16,7 @@ from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST, CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from .const import DOMAIN
@@ -91,11 +92,22 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN):
self._discovered[CONF_ACCESS_TOKEN] = token
try:
_, hub_name = await _validate_input(self.hass, self._discovered)
bond_id, hub_name = await _validate_input(self.hass, self._discovered)
except InputValidationError:
return
await self.async_set_unique_id(bond_id)
self._abort_if_unique_id_configured(updates={CONF_HOST: host})
self._discovered[CONF_NAME] = hub_name
async def async_step_dhcp(
self, discovery_info: DhcpServiceInfo
) -> ConfigFlowResult:
"""Handle a flow initialized by dhcp discovery."""
host = discovery_info.ip
bond_id = discovery_info.hostname.partition("-")[2].upper()
await self.async_set_unique_id(bond_id)
return await self.async_step_any_discovery(bond_id, host)
async def async_step_zeroconf(
self, discovery_info: ZeroconfServiceInfo
) -> ConfigFlowResult:
@@ -104,11 +116,17 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN):
host: str = discovery_info.host
bond_id = name.partition(".")[0]
await self.async_set_unique_id(bond_id)
return await self.async_step_any_discovery(bond_id, host)
async def async_step_any_discovery(
self, bond_id: str, host: str
) -> ConfigFlowResult:
"""Handle a flow initialized by discovery."""
for entry in self._async_current_entries():
if entry.unique_id != bond_id:
continue
updates = {CONF_HOST: host}
if entry.state == ConfigEntryState.SETUP_ERROR and (
if entry.state is ConfigEntryState.SETUP_ERROR and (
token := await async_get_token(self.hass, host)
):
updates[CONF_ACCESS_TOKEN] = token
@@ -153,10 +171,14 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN):
CONF_HOST: self._discovered[CONF_HOST],
}
try:
_, hub_name = await _validate_input(self.hass, data)
bond_id, hub_name = await _validate_input(self.hass, data)
except InputValidationError as error:
errors["base"] = error.base
else:
await self.async_set_unique_id(bond_id)
self._abort_if_unique_id_configured(
updates={CONF_HOST: self._discovered[CONF_HOST]}
)
return self.async_create_entry(
title=hub_name,
data=data,
@@ -185,8 +207,10 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN):
except InputValidationError as error:
errors["base"] = error.base
else:
await self.async_set_unique_id(bond_id)
self._abort_if_unique_id_configured()
await self.async_set_unique_id(bond_id, raise_on_progress=False)
self._abort_if_unique_id_configured(
updates={CONF_HOST: user_input[CONF_HOST]}
)
return self.async_create_entry(title=hub_name, data=user_input)
return self.async_show_form(
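The DHCP step derives the Bond ID from hostnames matching bond-*: str.partition keeps everything after the first hyphen, so IDs that themselves contain hyphens survive. For example (hostnames here are made up):

def bond_id_from_hostname(hostname: str) -> str:
    """Mirror the DHCP step above: uppercase everything after the first '-'."""
    return hostname.partition("-")[2].upper()

assert bond_id_from_hostname("bond-zzbl12345") == "ZZBL12345"
assert bond_id_from_hostname("bond-ab-01") == "AB-01"  # later hyphens survive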

View File

@@ -3,6 +3,16 @@
"name": "Bond",
"codeowners": ["@bdraco", "@prystupa", "@joshs85", "@marciogranzotto"],
"config_flow": true,
"dhcp": [
{
"hostname": "bond-*",
"macaddress": "3C6A2C1*"
},
{
"hostname": "bond-*",
"macaddress": "F44E38*"
}
],
"documentation": "https://www.home-assistant.io/integrations/bond",
"iot_class": "local_push",
"loggers": ["bond_async"],

View File

@@ -9,12 +9,12 @@ from bosch_alarm_mode2 import Panel
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN
PLATFORMS: list[Platform] = [Platform.ALARM_CONTROL_PANEL]
PLATFORMS: list[Platform] = [Platform.ALARM_CONTROL_PANEL, Platform.SENSOR]
type BoschAlarmConfigEntry = ConfigEntry[Panel]
@@ -34,10 +34,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -
await panel.connect()
except (PermissionError, ValueError) as err:
await panel.disconnect()
raise ConfigEntryNotReady from err
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_key="authentication_failed"
) from err
except (TimeoutError, OSError, ConnectionRefusedError, SSLError) as err:
await panel.disconnect()
raise ConfigEntryNotReady("Connection failed") from err
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
) from err
entry.runtime_data = panel

View File

@@ -10,11 +10,10 @@ from homeassistant.components.alarm_control_panel import (
AlarmControlPanelState,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import BoschAlarmConfigEntry
from .const import DOMAIN
from .entity import BoschAlarmAreaEntity
async def async_setup_entry(
@@ -35,7 +34,7 @@
)
class AreaAlarmControlPanel(AlarmControlPanelEntity):
class AreaAlarmControlPanel(BoschAlarmAreaEntity, AlarmControlPanelEntity):
"""An alarm control panel entity for a bosch alarm panel."""
_attr_has_entity_name = True
@@ -48,19 +47,8 @@
def __init__(self, panel: Panel, area_id: int, unique_id: str) -> None:
"""Initialise a Bosch Alarm control panel entity."""
self.panel = panel
self._area = panel.areas[area_id]
self._area_id = area_id
self._attr_unique_id = f"{unique_id}_area_{area_id}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._attr_unique_id)},
name=self._area.name,
manufacturer="Bosch Security Systems",
via_device=(
DOMAIN,
unique_id,
),
)
super().__init__(panel, area_id, unique_id, False, False, True)
self._attr_unique_id = self._area_unique_id
@property
def alarm_state(self) -> AlarmControlPanelState | None:
@@ -90,20 +78,3 @@ class AreaAlarmControlPanel(AlarmControlPanelEntity):
async def async_alarm_arm_away(self, code: str | None = None) -> None:
"""Send arm away command."""
await self.panel.area_arm_all(self._area_id)
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self.panel.connection_status()
async def async_added_to_hass(self) -> None:
"""Run when entity attached to hass."""
await super().async_added_to_hass()
self._area.status_observer.attach(self.schedule_update_ha_state)
self.panel.connection_status_observer.attach(self.schedule_update_ha_state)
async def async_will_remove_from_hass(self) -> None:
"""Run when entity removed from hass."""
await super().async_will_remove_from_hass()
self._area.status_observer.detach(self.schedule_update_ha_state)
self.panel.connection_status_observer.detach(self.schedule_update_ha_state)

View File

@@ -3,6 +3,7 @@
from __future__ import annotations
import asyncio
from collections.abc import Mapping
import logging
import ssl
from typing import Any
@@ -10,7 +11,12 @@ from typing import Any
from bosch_alarm_mode2 import Panel
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import (
SOURCE_RECONFIGURE,
SOURCE_USER,
ConfigFlow,
ConfigFlowResult,
)
from homeassistant.const import (
CONF_CODE,
CONF_HOST,
@@ -107,6 +113,13 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
else:
self._data = user_input
self._data[CONF_MODEL] = model
if self.source == SOURCE_RECONFIGURE:
if (
self._get_reconfigure_entry().data[CONF_MODEL]
!= self._data[CONF_MODEL]
):
return self.async_abort(reason="device_mismatch")
return await self.async_step_auth()
return self.async_show_form(
step_id="user",
@@ -116,6 +129,12 @@
errors=errors,
)
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the reconfigure step."""
return await self.async_step_user()
async def async_step_auth(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -153,13 +172,77 @@
else:
if serial_number:
await self.async_set_unique_id(str(serial_number))
if self.source == SOURCE_USER:
if serial_number:
self._abort_if_unique_id_configured()
else:
self._async_abort_entries_match({CONF_HOST: self._data[CONF_HOST]})
return self.async_create_entry(title=f"Bosch {model}", data=self._data)
self._async_abort_entries_match(
{CONF_HOST: self._data[CONF_HOST]}
)
return self.async_create_entry(
title=f"Bosch {model}", data=self._data
)
if serial_number:
self._abort_if_unique_id_mismatch(reason="device_mismatch")
return self.async_update_reload_and_abort(
self._get_reconfigure_entry(),
data=self._data,
)
return self.async_show_form(
step_id="auth",
data_schema=self.add_suggested_values_to_schema(schema, user_input),
errors=errors,
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth upon an authentication error."""
self._data = dict(entry_data)
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the reauth step."""
errors: dict[str, str] = {}
# Each model variant requires a different authentication flow
if "Solution" in self._data[CONF_MODEL]:
schema = STEP_AUTH_DATA_SCHEMA_SOLUTION
elif "AMAX" in self._data[CONF_MODEL]:
schema = STEP_AUTH_DATA_SCHEMA_AMAX
else:
schema = STEP_AUTH_DATA_SCHEMA_BG
if user_input is not None:
reauth_entry = self._get_reauth_entry()
self._data.update(user_input)
try:
(_, _) = await try_connect(self._data, Panel.LOAD_EXTENDED_INFO)
except (PermissionError, ValueError) as e:
errors["base"] = "invalid_auth"
_LOGGER.error("Authentication Error: %s", e)
except (
OSError,
ConnectionRefusedError,
ssl.SSLError,
TimeoutError,
) as e:
_LOGGER.error("Connection Error: %s", e)
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
reauth_entry,
data_updates=user_input,
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=self.add_suggested_values_to_schema(schema, user_input),
errors=errors,
)

View File

@ -0,0 +1,73 @@
"""Diagnostics for bosch alarm."""
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_PASSWORD
from homeassistant.core import HomeAssistant
from . import BoschAlarmConfigEntry
from .const import CONF_INSTALLER_CODE, CONF_USER_CODE
TO_REDACT = [CONF_INSTALLER_CODE, CONF_USER_CODE, CONF_PASSWORD]
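# Codes and passwords are redacted so diagnostics output can be shared safely.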
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: BoschAlarmConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
return {
"entry_data": async_redact_data(entry.data, TO_REDACT),
"data": {
"model": entry.runtime_data.model,
"serial_number": entry.runtime_data.serial_number,
"protocol_version": entry.runtime_data.protocol_version,
"firmware_version": entry.runtime_data.firmware_version,
"areas": [
{
"id": area_id,
"name": area.name,
"all_ready": area.all_ready,
"part_ready": area.part_ready,
"faults": area.faults,
"alarms": area.alarms,
"disarmed": area.is_disarmed(),
"arming": area.is_arming(),
"pending": area.is_pending(),
"part_armed": area.is_part_armed(),
"all_armed": area.is_all_armed(),
"armed": area.is_armed(),
"triggered": area.is_triggered(),
}
for area_id, area in entry.runtime_data.areas.items()
],
"points": [
{
"id": point_id,
"name": point.name,
"open": point.is_open(),
"normal": point.is_normal(),
}
for point_id, point in entry.runtime_data.points.items()
],
"doors": [
{
"id": door_id,
"name": door.name,
"open": door.is_open(),
"locked": door.is_locked(),
}
for door_id, door in entry.runtime_data.doors.items()
],
"outputs": [
{
"id": output_id,
"name": output.name,
"active": output.is_active(),
}
for output_id, output in entry.runtime_data.outputs.items()
],
"history_events": entry.runtime_data.events,
},
}

View File

@ -0,0 +1,88 @@
"""Support for Bosch Alarm Panel History as a sensor."""
from __future__ import annotations
from bosch_alarm_mode2 import Panel
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity
from .const import DOMAIN
PARALLEL_UPDATES = 0
class BoschAlarmEntity(Entity):
"""A base entity for a bosch alarm panel."""
_attr_has_entity_name = True
def __init__(self, panel: Panel, unique_id: str) -> None:
"""Set up a entity for a bosch alarm panel."""
self.panel = panel
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, unique_id)},
name=f"Bosch {panel.model}",
manufacturer="Bosch Security Systems",
)
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self.panel.connection_status()
async def async_added_to_hass(self) -> None:
"""Observe state changes."""
self.panel.connection_status_observer.attach(self.schedule_update_ha_state)
async def async_will_remove_from_hass(self) -> None:
"""Stop observing state changes."""
self.panel.connection_status_observer.detach(self.schedule_update_ha_state)
class BoschAlarmAreaEntity(BoschAlarmEntity):
"""A base entity for area related entities within a bosch alarm panel."""
def __init__(
self,
panel: Panel,
area_id: int,
unique_id: str,
observe_alarms: bool,
observe_ready: bool,
observe_status: bool,
) -> None:
"""Set up a area related entity for a bosch alarm panel."""
super().__init__(panel, unique_id)
self._area_id = area_id
self._area_unique_id = f"{unique_id}_area_{area_id}"
self._observe_alarms = observe_alarms
self._observe_ready = observe_ready
self._observe_status = observe_status
self._area = panel.areas[area_id]
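# Each area gets its own device entry, linked back to the panel via via_device.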
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._area_unique_id)},
name=self._area.name,
manufacturer="Bosch Security Systems",
via_device=(DOMAIN, unique_id),
)
async def async_added_to_hass(self) -> None:
"""Observe state changes."""
await super().async_added_to_hass()
if self._observe_alarms:
self._area.alarm_observer.attach(self.schedule_update_ha_state)
if self._observe_ready:
self._area.ready_observer.attach(self.schedule_update_ha_state)
if self._observe_status:
self._area.status_observer.attach(self.schedule_update_ha_state)
async def async_will_remove_from_hass(self) -> None:
"""Stop observing state changes."""
await super().async_will_remove_from_hass()
if self._observe_alarms:
self._area.alarm_observer.detach(self.schedule_update_ha_state)
if self._observe_ready:
self._area.ready_observer.detach(self.schedule_update_ha_state)
if self._observe_status:
self._area.status_observer.detach(self.schedule_update_ha_state)

View File

@ -0,0 +1,9 @@
{
"entity": {
"sensor": {
"faulting_points": {
"default": "mdi:alert-circle-outline"
}
}
}
}

View File

@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_push",
"quality_scale": "bronze",
"requirements": ["bosch-alarm-mode2==0.4.3"]
"requirements": ["bosch-alarm-mode2==0.4.6"]
}

View File

@ -40,7 +40,7 @@ rules:
integration-owner: done
log-when-unavailable: todo
parallel-updates: todo
reauthentication-flow: todo
reauthentication-flow: done
test-coverage: done
# Gold
@ -62,9 +62,9 @@ rules:
entity-category: todo
entity-device-class: todo
entity-disabled-by-default: todo
entity-translations: todo
entity-translations: done
exception-translations: todo
icon-translations: todo
icon-translations: done
reconfiguration-flow: todo
repair-issues:
status: exempt

View File

@ -0,0 +1,86 @@
"""Support for Bosch Alarm Panel History as a sensor."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from bosch_alarm_mode2 import Panel
from bosch_alarm_mode2.panel import Area
from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import BoschAlarmConfigEntry
from .entity import BoschAlarmAreaEntity
@dataclass(kw_only=True, frozen=True)
class BoschAlarmSensorEntityDescription(SensorEntityDescription):
"""Describes Bosch Alarm sensor entity."""
value_fn: Callable[[Area], int]
observe_alarms: bool = False
observe_ready: bool = False
observe_status: bool = False
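# The observe_* flags select which panel observers the entity subscribes to.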
SENSOR_TYPES: list[BoschAlarmSensorEntityDescription] = [
BoschAlarmSensorEntityDescription(
key="faulting_points",
translation_key="faulting_points",
value_fn=lambda area: area.faults,
observe_ready=True,
),
]
async def async_setup_entry(
hass: HomeAssistant,
config_entry: BoschAlarmConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up bosch alarm sensors."""
panel = config_entry.runtime_data
unique_id = config_entry.unique_id or config_entry.entry_id
async_add_entities(
BoschAreaSensor(panel, area_id, unique_id, template)
for area_id in panel.areas
for template in SENSOR_TYPES
)
PARALLEL_UPDATES = 0
class BoschAreaSensor(BoschAlarmAreaEntity, SensorEntity):
"""An area sensor entity for a bosch alarm panel."""
entity_description: BoschAlarmSensorEntityDescription
def __init__(
self,
panel: Panel,
area_id: int,
unique_id: str,
entity_description: BoschAlarmSensorEntityDescription,
) -> None:
"""Set up an area sensor entity for a bosch alarm panel."""
super().__init__(
panel,
area_id,
unique_id,
entity_description.observe_alarms,
entity_description.observe_ready,
entity_description.observe_status,
)
self.entity_description = entity_description
self._attr_unique_id = f"{self._area_unique_id}_{entity_description.key}"
@property
def native_value(self) -> int:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self._area)

View File

@ -22,6 +22,18 @@
"installer_code": "The installer code from your panel",
"user_code": "The user code from your panel"
}
},
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]",
"installer_code": "[%key:component::bosch_alarm::config::step::auth::data::installer_code%]",
"user_code": "[%key:component::bosch_alarm::config::step::auth::data::user_code%]"
},
"data_description": {
"password": "[%key:component::bosch_alarm::config::step::auth::data_description::password%]",
"installer_code": "[%key:component::bosch_alarm::config::step::auth::data_description::installer_code%]",
"user_code": "[%key:component::bosch_alarm::config::step::auth::data_description::user_code%]"
}
}
},
"error": {
@ -30,7 +42,26 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"device_mismatch": "Please ensure you reconfigure against the same device."
}
},
"exceptions": {
"cannot_connect": {
"message": "Could not connect to panel."
},
"authentication_failed": {
"message": "Incorrect credentials for panel."
}
},
"entity": {
"sensor": {
"faulting_points": {
"name": "Faulting points",
"unit_of_measurement": "points"
}
}
}
}

View File

@ -12,6 +12,7 @@ from buienradar.constants import (
CONDITION,
CONTENT,
DATA,
FEELTEMPERATURE,
FORECAST,
HUMIDITY,
MESSAGE,
@ -22,6 +23,7 @@ from buienradar.constants import (
TEMPERATURE,
VISIBILITY,
WINDAZIMUTH,
WINDGUST,
WINDSPEED,
)
from buienradar.urls import JSON_FEED_URL, json_precipitation_forecast_url
@ -200,6 +202,14 @@ class BrData:
except (ValueError, TypeError):
return None
@property
def feeltemperature(self):
"""Return the feeltemperature, or None."""
try:
return float(self.data.get(FEELTEMPERATURE))
except (ValueError, TypeError):
return None
@property
def pressure(self):
"""Return the pressure, or None."""
@ -224,6 +234,14 @@ class BrData:
except (ValueError, TypeError):
return None
@property
def wind_gust(self):
"""Return the windgust, or None."""
try:
return float(self.data.get(WINDGUST))
except (ValueError, TypeError):
return None
@property
def wind_speed(self):
"""Return the windspeed, or None."""

View File

@ -9,6 +9,7 @@ from buienradar.constants import (
MAX_TEMP,
MIN_TEMP,
RAIN,
RAIN_CHANCE,
WINDAZIMUTH,
WINDSPEED,
)
@ -33,6 +34,7 @@ from homeassistant.components.weather import (
ATTR_FORECAST_NATIVE_TEMP,
ATTR_FORECAST_NATIVE_TEMP_LOW,
ATTR_FORECAST_NATIVE_WIND_SPEED,
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TIME,
ATTR_FORECAST_WIND_BEARING,
Forecast,
@ -153,7 +155,9 @@ class BrWeather(WeatherEntity):
)
self._attr_native_pressure = data.pressure
self._attr_native_temperature = data.temperature
self._attr_native_apparent_temperature = data.feeltemperature
self._attr_native_visibility = data.visibility
self._attr_native_wind_gust_speed = data.wind_gust
self._attr_native_wind_speed = data.wind_speed
self._attr_wind_bearing = data.wind_bearing
@ -188,6 +192,7 @@ class BrWeather(WeatherEntity):
ATTR_FORECAST_NATIVE_TEMP_LOW: data_in.get(MIN_TEMP),
ATTR_FORECAST_NATIVE_TEMP: data_in.get(MAX_TEMP),
ATTR_FORECAST_NATIVE_PRECIPITATION: data_in.get(RAIN),
ATTR_FORECAST_PRECIPITATION_PROBABILITY: data_in.get(RAIN_CHANCE),
ATTR_FORECAST_WIND_BEARING: data_in.get(WINDAZIMUTH),
ATTR_FORECAST_NATIVE_WIND_SPEED: data_in.get(WINDSPEED),
}

View File

@ -74,7 +74,7 @@
},
"get_events": {
"name": "Get events",
"description": "Get events on a calendar within a time range.",
"description": "Retrieves events on a calendar within a time range.",
"fields": {
"start_date_time": {
"name": "Start time",

View File

@ -142,6 +142,12 @@ class CambridgeAudioDevice(CambridgeAudioEntity, MediaPlayerEntity):
@property
def media_artist(self) -> str | None:
"""Artist of current playing media, music track only."""
if (
not self.client.play_state.metadata.artist
and self.client.state.source == "IR"
):
# Return channel instead of artist when playing internet radio
return self.client.play_state.metadata.station
return self.client.play_state.metadata.artist
@property
@ -169,6 +175,11 @@ class CambridgeAudioDevice(CambridgeAudioEntity, MediaPlayerEntity):
"""Last time the media position was updated."""
return self.client.position_last_updated
@property
def media_channel(self) -> str | None:
"""Channel currently playing."""
return self.client.play_state.metadata.station
@property
def is_volume_muted(self) -> bool | None:
"""Volume mute status."""

View File

@ -2,18 +2,11 @@
from __future__ import annotations
from contextlib import suppress
import logging
from typing import TYPE_CHECKING, Literal, cast
with suppress(Exception):
# TurboJPEG imports numpy which may or may not work so
# we have to guard the import here. We still want
# to import it at top level so it gets loaded
# in the import executor and not in the event loop.
from turbojpeg import TurboJPEG
if TYPE_CHECKING:
from . import Image

View File

@ -8,46 +8,18 @@ from typing import Final
from canary.api import Api
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol
from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType
from .const import (
CONF_FFMPEG_ARGUMENTS,
DEFAULT_FFMPEG_ARGUMENTS,
DEFAULT_TIMEOUT,
DOMAIN,
)
from .const import CONF_FFMPEG_ARGUMENTS, DEFAULT_FFMPEG_ARGUMENTS, DEFAULT_TIMEOUT
from .coordinator import CanaryConfigEntry, CanaryDataUpdateCoordinator
_LOGGER: Final = logging.getLogger(__name__)
MIN_TIME_BETWEEN_UPDATES: Final = timedelta(seconds=30)
CONFIG_SCHEMA: Final = vol.Schema(
vol.All(
cv.deprecated(DOMAIN),
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(
CONF_TIMEOUT, default=DEFAULT_TIMEOUT
): cv.positive_int,
}
)
},
),
extra=vol.ALLOW_EXTRA,
)
PLATFORMS: Final[list[Platform]] = [
Platform.ALARM_CONTROL_PANEL,
Platform.CAMERA,
@ -55,37 +27,6 @@ PLATFORMS: Final[list[Platform]] = [
]
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Canary integration."""
if hass.config_entries.async_entries(DOMAIN):
return True
ffmpeg_arguments = DEFAULT_FFMPEG_ARGUMENTS
if CAMERA_DOMAIN in config:
camera_config = next(
(item for item in config[CAMERA_DOMAIN] if item["platform"] == DOMAIN),
None,
)
if camera_config:
ffmpeg_arguments = camera_config.get(
CONF_FFMPEG_ARGUMENTS, DEFAULT_FFMPEG_ARGUMENTS
)
if DOMAIN in config:
if ffmpeg_arguments != DEFAULT_FFMPEG_ARGUMENTS:
config[DOMAIN][CONF_FFMPEG_ARGUMENTS] = ffmpeg_arguments
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=config[DOMAIN],
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: CanaryConfigEntry) -> bool:
"""Set up Canary from a config entry."""
if not entry.options:

View File

@ -54,10 +54,6 @@ class CanaryConfigFlow(ConfigFlow, domain=DOMAIN):
"""Get the options flow for this handler."""
return CanaryOptionsFlowHandler()
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
"""Handle a flow initiated by configuration file."""
return await self.async_step_user(import_data)
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:

Some files were not shown because too many files have changed in this diff Show More