Mirror of https://github.com/home-assistant/core.git (synced 2025-07-28 07:37:34 +00:00)

Merge branch 'dev' into zha_3phase_current_entities

Commit 9883c54725
.core_files.yaml

@@ -6,6 +6,7 @@ core: &core
  - homeassistant/helpers/**
  - homeassistant/package_constraints.txt
  - homeassistant/util/**
  - mypy.ini
  - pyproject.toml
  - requirements.txt
  - setup.cfg
@@ -131,6 +132,7 @@ tests: &tests
  - tests/components/conftest.py
  - tests/components/diagnostics/**
  - tests/components/history/**
  - tests/components/light/common.py
  - tests/components/logbook/**
  - tests/components/recorder/**
  - tests/components/repairs/**
.github/workflows/builder.yml (4 changes, vendored)
@@ -69,7 +69,7 @@ jobs:
        run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

      - name: Upload translations
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: translations
          path: translations.tar.gz
@@ -517,7 +517,7 @@ jobs:
          tags: ${{ env.HASSFEST_IMAGE_TAG }}

      - name: Run hassfest against core
-        run: docker run --rm -v ${{ github.workspace }}/homeassistant:/github/workspace/homeassistant ${{ env.HASSFEST_IMAGE_TAG }} --core-integrations-path=/github/workspace/homeassistant/components
+        run: docker run --rm -v ${{ github.workspace }}:/github/workspace ${{ env.HASSFEST_IMAGE_TAG }} --core-path=/github/workspace

      - name: Push Docker image
        if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
.github/workflows/ci.yaml (26 changes, vendored)
@@ -537,7 +537,7 @@ jobs:
          python --version
          uv pip freeze >> pip_freeze.txt
      - name: Upload pip_freeze artifact
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: pip-freeze-${{ matrix.python-version }}
          path: pip_freeze.txt
@@ -661,7 +661,7 @@ jobs:
          . venv/bin/activate
          python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
      - name: Upload licenses
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
          path: licenses-${{ matrix.python-version }}.json
@@ -877,7 +877,7 @@ jobs:
          . venv/bin/activate
          python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
      - name: Upload pytest_buckets
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: pytest_buckets
          path: pytest_buckets.txt
@@ -979,14 +979,14 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
          path: pytest-*.txt
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
@@ -1106,7 +1106,7 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }}
@@ -1114,7 +1114,7 @@ jobs:
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }}
@@ -1236,7 +1236,7 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }}
@@ -1244,7 +1244,7 @@ jobs:
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }}
@@ -1273,7 +1273,7 @@ jobs:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.1.1
+        uses: codecov/codecov-action@v5.1.2
        with:
          fail_ci_if_error: true
          flags: full-suite
@@ -1378,14 +1378,14 @@ jobs:
          2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
          path: pytest-*.txt
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
@@ -1411,7 +1411,7 @@ jobs:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.1.1
+        uses: codecov/codecov-action@v5.1.2
        with:
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/codeql.yml (4 changes, vendored)
@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@v4.2.2

      - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.27.6
+        uses: github/codeql-action/init@v3.28.0
        with:
          languages: python

      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.27.6
+        uses: github/codeql-action/analyze@v3.28.0
        with:
          category: "/language:python"
.github/workflows/wheels.yml (6 changes, vendored)
@@ -79,7 +79,7 @@ jobs:
          ) > .env_file

      - name: Upload env_file
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: env_file
          path: ./.env_file
@@ -87,7 +87,7 @@ jobs:
          overwrite: true

      - name: Upload requirements_diff
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: requirements_diff
          path: ./requirements_diff.txt
@@ -99,7 +99,7 @@ jobs:
          python -m script.gen_requirements_all ci

      - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.5.0
        with:
          name: requirements_all_wheels
          path: ./requirements_all_wheels_*.txt
.pre-commit-config.yaml

@@ -1,6 +1,6 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.8.1
+    rev: v0.8.3
    hooks:
      - id: ruff
        args:
@@ -12,7 +12,7 @@ repos:
    hooks:
      - id: codespell
        args:
-          - --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
+          - --ignore-words-list=aiport,astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
          - --skip="./.*,*.csv,*.json,*.ambr"
          - --quiet-level=2
        exclude_types: [csv, json, html]
.strict-typing

@@ -137,6 +137,7 @@ homeassistant.components.co2signal.*
homeassistant.components.command_line.*
homeassistant.components.config.*
homeassistant.components.configurator.*
+homeassistant.components.cookidoo.*
homeassistant.components.counter.*
homeassistant.components.cover.*
homeassistant.components.cpuspeed.*
@@ -169,6 +170,7 @@ homeassistant.components.easyenergy.*
homeassistant.components.ecovacs.*
homeassistant.components.ecowitt.*
homeassistant.components.efergy.*
+homeassistant.components.eheimdigital.*
homeassistant.components.electrasmart.*
homeassistant.components.electric_kiwi.*
homeassistant.components.elevenlabs.*
@@ -269,6 +271,7 @@ homeassistant.components.ios.*
homeassistant.components.iotty.*
homeassistant.components.ipp.*
homeassistant.components.iqvia.*
homeassistant.components.iron_os.*
homeassistant.components.islamic_prayer_times.*
homeassistant.components.isy994.*
homeassistant.components.jellyfin.*
@@ -308,6 +311,7 @@ homeassistant.components.manual.*
homeassistant.components.mastodon.*
homeassistant.components.matrix.*
homeassistant.components.matter.*
homeassistant.components.mealie.*
homeassistant.components.media_extractor.*
homeassistant.components.media_player.*
homeassistant.components.media_source.*
@@ -360,6 +364,7 @@ homeassistant.components.otbr.*
homeassistant.components.overkiz.*
homeassistant.components.p1_monitor.*
homeassistant.components.panel_custom.*
+homeassistant.components.peblar.*
homeassistant.components.peco.*
homeassistant.components.persistent_notification.*
homeassistant.components.pi_hole.*
@@ -402,6 +407,7 @@ homeassistant.components.romy.*
homeassistant.components.rpi_power.*
homeassistant.components.rss_feed_template.*
homeassistant.components.rtsp_to_webrtc.*
homeassistant.components.russound_rio.*
homeassistant.components.ruuvi_gateway.*
homeassistant.components.ruuvitag_ble.*
homeassistant.components.samsungtv.*
CODEOWNERS (23 changes)
@@ -284,6 +284,8 @@ build.json @home-assistant/supervisor
/tests/components/control4/ @lawtancool
/homeassistant/components/conversation/ @home-assistant/core @synesthesiam
/tests/components/conversation/ @home-assistant/core @synesthesiam
+/homeassistant/components/cookidoo/ @miaucl
+/tests/components/cookidoo/ @miaucl
/homeassistant/components/coolmaster/ @OnFreund
/tests/components/coolmaster/ @OnFreund
/homeassistant/components/counter/ @fabaff
@@ -385,6 +387,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/efergy/ @tkdrob
/tests/components/efergy/ @tkdrob
/homeassistant/components/egardia/ @jeroenterheerdt
+/homeassistant/components/eheimdigital/ @autinerd
+/tests/components/eheimdigital/ @autinerd
/homeassistant/components/electrasmart/ @jafar-atili
/tests/components/electrasmart/ @jafar-atili
/homeassistant/components/electric_kiwi/ @mikey0000
@@ -574,8 +578,8 @@ build.json @home-assistant/supervisor
/tests/components/google_tasks/ @allenporter
/homeassistant/components/google_travel_time/ @eifinger
/tests/components/google_travel_time/ @eifinger
-/homeassistant/components/govee_ble/ @bdraco @PierreAronnax
-/tests/components/govee_ble/ @bdraco @PierreAronnax
+/homeassistant/components/govee_ble/ @bdraco
+/tests/components/govee_ble/ @bdraco
/homeassistant/components/govee_light_local/ @Galorhallen
/tests/components/govee_light_local/ @Galorhallen
/homeassistant/components/gpsd/ @fabaff @jrieger
@@ -727,8 +731,8 @@ build.json @home-assistant/supervisor
/tests/components/ios/ @robbiet480
/homeassistant/components/iotawatt/ @gtdiehl @jyavenard
/tests/components/iotawatt/ @gtdiehl @jyavenard
-/homeassistant/components/iotty/ @pburgio @shapournemati-iotty
-/tests/components/iotty/ @pburgio @shapournemati-iotty
+/homeassistant/components/iotty/ @shapournemati-iotty
+/tests/components/iotty/ @shapournemati-iotty
/homeassistant/components/iperf3/ @rohankapoorcom
/homeassistant/components/ipma/ @dgomes
/tests/components/ipma/ @dgomes
@@ -1049,6 +1053,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/octoprint/ @rfleming71
/tests/components/octoprint/ @rfleming71
/homeassistant/components/ohmconnect/ @robbiet480
+/homeassistant/components/ohme/ @dan-r
+/tests/components/ohme/ @dan-r
/homeassistant/components/ollama/ @synesthesiam
/tests/components/ollama/ @synesthesiam
/homeassistant/components/ombi/ @larssont
@@ -1060,8 +1066,8 @@ build.json @home-assistant/supervisor
/tests/components/ondilo_ico/ @JeromeHXP
/homeassistant/components/onewire/ @garbled1 @epenet
/tests/components/onewire/ @garbled1 @epenet
-/homeassistant/components/onkyo/ @arturpragacz
-/tests/components/onkyo/ @arturpragacz
+/homeassistant/components/onkyo/ @arturpragacz @eclair4151
+/tests/components/onkyo/ @arturpragacz @eclair4151
/homeassistant/components/onvif/ @hunterjm
/tests/components/onvif/ @hunterjm
/homeassistant/components/open_meteo/ @frenck
@@ -1107,6 +1113,8 @@ build.json @home-assistant/supervisor
/tests/components/palazzetti/ @dotvav
/homeassistant/components/panel_custom/ @home-assistant/frontend
/tests/components/panel_custom/ @home-assistant/frontend
+/homeassistant/components/peblar/ @frenck
+/tests/components/peblar/ @frenck
/homeassistant/components/peco/ @IceBotYT
/tests/components/peco/ @IceBotYT
/homeassistant/components/pegel_online/ @mib1185
@@ -1359,6 +1367,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/sleepiq/ @mfugate1 @kbickar
/tests/components/sleepiq/ @mfugate1 @kbickar
/homeassistant/components/slide/ @ualex73
+/homeassistant/components/slide_local/ @dontinelli
+/tests/components/slide_local/ @dontinelli
/homeassistant/components/slimproto/ @marcelveldt
/tests/components/slimproto/ @marcelveldt
/homeassistant/components/sma/ @kellerza @rklomp
@@ -1732,6 +1742,7 @@ build.json @home-assistant/supervisor
/tests/components/youless/ @gjong
/homeassistant/components/youtube/ @joostlek
/tests/components/youtube/ @joostlek
/homeassistant/components/zabbix/ @kruton
/homeassistant/components/zamg/ @killer0071234
/tests/components/zamg/ @killer0071234
/homeassistant/components/zengge/ @emontnemery
Dockerfile

@@ -13,7 +13,7 @@ ENV \
ARG QEMU_CPU

# Install uv
-RUN pip3 install uv==0.5.4
+RUN pip3 install uv==0.5.8

WORKDIR /usr/src
Dockerfile.dev

@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/devcontainers/python:1-3.12
+FROM mcr.microsoft.com/devcontainers/python:1-3.13

SHELL ["/bin/bash", "-o", "pipefail", "-c"]
build.yaml (10 changes)
@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.0
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.0
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.0
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.0
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.0
codenotary:
  signer: notary@home-assistant.io
  base_image: notary@home-assistant.io
homeassistant/auth/__init__.py

@@ -115,7 +115,7 @@ class AuthManagerFlowManager(
        *,
        context: AuthFlowContext | None = None,
        data: dict[str, Any] | None = None,
-    ) -> LoginFlow:
+    ) -> LoginFlow[Any]:
        """Create a login flow."""
        auth_provider = self.auth_manager.get_auth_provider(*handler_key)
        if not auth_provider:
homeassistant/auth/mfa_modules/__init__.py

@@ -4,8 +4,9 @@ from __future__ import annotations

import logging
import types
-from typing import Any
+from typing import Any, Generic

+from typing_extensions import TypeVar
import voluptuous as vol
from voluptuous.humanize import humanize_error

@@ -34,6 +35,12 @@ DATA_REQS: HassKey[set[str]] = HassKey("mfa_auth_module_reqs_processed")

_LOGGER = logging.getLogger(__name__)

+_MultiFactorAuthModuleT = TypeVar(
+    "_MultiFactorAuthModuleT",
+    bound="MultiFactorAuthModule",
+    default="MultiFactorAuthModule",
+)


class MultiFactorAuthModule:
    """Multi-factor Auth Module of validation function."""
@@ -71,7 +78,7 @@ class MultiFactorAuthModule:
        """Return a voluptuous schema to define mfa auth module's input."""
        raise NotImplementedError

-    async def async_setup_flow(self, user_id: str) -> SetupFlow:
+    async def async_setup_flow(self, user_id: str) -> SetupFlow[Any]:
        """Return a data entry flow handler for setup module.

        Mfa module should extend SetupFlow
@@ -95,11 +102,14 @@ class MultiFactorAuthModule:
        raise NotImplementedError


-class SetupFlow(data_entry_flow.FlowHandler):
+class SetupFlow(data_entry_flow.FlowHandler, Generic[_MultiFactorAuthModuleT]):
    """Handler for the setup flow."""

    def __init__(
-        self, auth_module: MultiFactorAuthModule, setup_schema: vol.Schema, user_id: str
+        self,
+        auth_module: _MultiFactorAuthModuleT,
+        setup_schema: vol.Schema,
+        user_id: str,
    ) -> None:
        """Initialize the setup flow."""
        self._auth_module = auth_module
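Note: the pattern above relies on typing_extensions.TypeVar with a `default=` argument (PEP 696). A minimal standalone sketch (illustrative names only, not Home Assistant code) of why a generic base flow lets subclasses drop casts and re-annotations, as the notify and totp modules below do:

from typing import Generic

from typing_extensions import TypeVar


class AuthModule:
    """Stand-in for MultiFactorAuthModule."""


class TotpModule(AuthModule):
    """Stand-in for TotpAuthModule."""


# default= means plain `Flow` is treated as Flow[AuthModule].
_AuthModuleT = TypeVar("_AuthModuleT", bound=AuthModule, default=AuthModule)


class Flow(Generic[_AuthModuleT]):
    def __init__(self, module: _AuthModuleT) -> None:
        # Typed as _AuthModuleT, so subclasses see their concrete module type.
        self.module = module


class TotpFlow(Flow[TotpModule]):
    def uses_totp(self) -> TotpModule:
        return self.module  # no cast or re-annotation needed


flow: Flow = Flow(AuthModule())  # bare `Flow` falls back to the default parameter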
homeassistant/auth/mfa_modules/notify.py

@@ -162,7 +162,7 @@ class NotifyAuthModule(MultiFactorAuthModule):

        return sorted(unordered_services)

-    async def async_setup_flow(self, user_id: str) -> SetupFlow:
+    async def async_setup_flow(self, user_id: str) -> NotifySetupFlow:
        """Return a data entry flow handler for setup module.

        Mfa module should extend SetupFlow
@@ -268,7 +268,7 @@ class NotifyAuthModule(MultiFactorAuthModule):
        await self.hass.services.async_call("notify", notify_service, data)


-class NotifySetupFlow(SetupFlow):
+class NotifySetupFlow(SetupFlow[NotifyAuthModule]):
    """Handler for the setup flow."""

    def __init__(
@@ -280,8 +280,6 @@ class NotifySetupFlow(SetupFlow):
    ) -> None:
        """Initialize the setup flow."""
        super().__init__(auth_module, setup_schema, user_id)
-        # to fix typing complaint
-        self._auth_module: NotifyAuthModule = auth_module
        self._available_notify_services = available_notify_services
        self._secret: str | None = None
        self._count: int | None = None
homeassistant/auth/mfa_modules/totp.py

@@ -114,7 +114,7 @@ class TotpAuthModule(MultiFactorAuthModule):
        self._users[user_id] = ota_secret  # type: ignore[index]
        return ota_secret

-    async def async_setup_flow(self, user_id: str) -> SetupFlow:
+    async def async_setup_flow(self, user_id: str) -> TotpSetupFlow:
        """Return a data entry flow handler for setup module.

        Mfa module should extend SetupFlow
@@ -174,10 +174,9 @@ class TotpAuthModule(MultiFactorAuthModule):
        return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1))


-class TotpSetupFlow(SetupFlow):
+class TotpSetupFlow(SetupFlow[TotpAuthModule]):
    """Handler for the setup flow."""

-    _auth_module: TotpAuthModule
    _ota_secret: str
    _url: str
    _image: str
homeassistant/auth/providers/__init__.py

@@ -5,8 +5,9 @@ from __future__ import annotations
from collections.abc import Mapping
import logging
import types
-from typing import Any
+from typing import Any, Generic

+from typing_extensions import TypeVar
import voluptuous as vol
from voluptuous.humanize import humanize_error

@@ -46,6 +47,8 @@ AUTH_PROVIDER_SCHEMA = vol.Schema(
    extra=vol.ALLOW_EXTRA,
)

+_AuthProviderT = TypeVar("_AuthProviderT", bound="AuthProvider", default="AuthProvider")


class AuthProvider:
    """Provider of user authentication."""
@@ -105,7 +108,7 @@ class AuthProvider:

    # Implement by extending class

-    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
+    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow[Any]:
        """Return the data flow for logging in with auth provider.

        Auth provider should extend LoginFlow and return an instance.
@@ -192,12 +195,15 @@ async def load_auth_provider_module(
    return module


-class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]):
+class LoginFlow(
+    FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]],
+    Generic[_AuthProviderT],
+):
    """Handler for the login flow."""

    _flow_result = AuthFlowResult

-    def __init__(self, auth_provider: AuthProvider) -> None:
+    def __init__(self, auth_provider: _AuthProviderT) -> None:
        """Initialize the login flow."""
        self._auth_provider = auth_provider
        self._auth_module_id: str | None = None
homeassistant/auth/providers/command_line.py

@@ -6,7 +6,7 @@ import asyncio
from collections.abc import Mapping
import logging
import os
-from typing import Any, cast
+from typing import Any

import voluptuous as vol

@@ -59,7 +59,9 @@ class CommandLineAuthProvider(AuthProvider):
        super().__init__(*args, **kwargs)
        self._user_meta: dict[str, dict[str, Any]] = {}

-    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
+    async def async_login_flow(
+        self, context: AuthFlowContext | None
+    ) -> CommandLineLoginFlow:
        """Return a flow to login."""
        return CommandLineLoginFlow(self)

@@ -133,7 +135,7 @@ class CommandLineAuthProvider(AuthProvider):
        )


-class CommandLineLoginFlow(LoginFlow):
+class CommandLineLoginFlow(LoginFlow[CommandLineAuthProvider]):
    """Handler for the login flow."""

    async def async_step_init(
@@ -145,9 +147,9 @@ class CommandLineLoginFlow(LoginFlow):
        if user_input is not None:
            user_input["username"] = user_input["username"].strip()
            try:
-                await cast(
-                    CommandLineAuthProvider, self._auth_provider
-                ).async_validate_login(user_input["username"], user_input["password"])
+                await self._auth_provider.async_validate_login(
+                    user_input["username"], user_input["password"]
+                )
            except InvalidAuthError:
                errors["base"] = "invalid_auth"
homeassistant/auth/providers/homeassistant.py

@@ -305,7 +305,7 @@ class HassAuthProvider(AuthProvider):
        await data.async_load()
        self.data = data

-    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
+    async def async_login_flow(self, context: AuthFlowContext | None) -> HassLoginFlow:
        """Return a flow to login."""
        return HassLoginFlow(self)

@@ -400,7 +400,7 @@ class HassAuthProvider(AuthProvider):
        pass


-class HassLoginFlow(LoginFlow):
+class HassLoginFlow(LoginFlow[HassAuthProvider]):
    """Handler for the login flow."""

    async def async_step_init(
@@ -411,7 +411,7 @@ class HassLoginFlow(LoginFlow):

        if user_input is not None:
            try:
-                await cast(HassAuthProvider, self._auth_provider).async_validate_login(
+                await self._auth_provider.async_validate_login(
                    user_input["username"], user_input["password"]
                )
            except InvalidAuth:
homeassistant/auth/providers/insecure_example.py

@@ -4,7 +4,6 @@ from __future__ import annotations

from collections.abc import Mapping
import hmac
-from typing import cast

import voluptuous as vol

@@ -36,7 +35,9 @@ class InvalidAuthError(HomeAssistantError):
class ExampleAuthProvider(AuthProvider):
    """Example auth provider based on hardcoded usernames and passwords."""

-    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
+    async def async_login_flow(
+        self, context: AuthFlowContext | None
+    ) -> ExampleLoginFlow:
        """Return a flow to login."""
        return ExampleLoginFlow(self)

@@ -93,7 +94,7 @@ class ExampleAuthProvider(AuthProvider):
        return UserMeta(name=name, is_active=True)


-class ExampleLoginFlow(LoginFlow):
+class ExampleLoginFlow(LoginFlow[ExampleAuthProvider]):
    """Handler for the login flow."""

    async def async_step_init(
@@ -104,7 +105,7 @@ class ExampleLoginFlow(LoginFlow):

        if user_input is not None:
            try:
-                cast(ExampleAuthProvider, self._auth_provider).async_validate_login(
+                self._auth_provider.async_validate_login(
                    user_input["username"], user_input["password"]
                )
            except InvalidAuthError:
homeassistant/auth/providers/trusted_networks.py

@@ -104,7 +104,9 @@ class TrustedNetworksAuthProvider(AuthProvider):
        """Trusted Networks auth provider does not support MFA."""
        return False

-    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
+    async def async_login_flow(
+        self, context: AuthFlowContext | None
+    ) -> TrustedNetworksLoginFlow:
        """Return a flow to login."""
        assert context is not None
        ip_addr = cast(IPAddress, context.get("ip_address"))
@@ -214,7 +216,7 @@ class TrustedNetworksAuthProvider(AuthProvider):
        self.async_validate_access(ip_address(remote_ip))


-class TrustedNetworksLoginFlow(LoginFlow):
+class TrustedNetworksLoginFlow(LoginFlow[TrustedNetworksAuthProvider]):
    """Handler for the login flow."""

    def __init__(
@@ -235,9 +237,7 @@ class TrustedNetworksLoginFlow(LoginFlow):
    ) -> AuthFlowResult:
        """Handle the step of the form."""
        try:
-            cast(
-                TrustedNetworksAuthProvider, self._auth_provider
-            ).async_validate_access(self._ip_address)
+            self._auth_provider.async_validate_access(self._ip_address)

        except InvalidAuthError:
            return self.async_abort(reason="not_allowed")
homeassistant/backup_restore.py

@@ -1,6 +1,10 @@
"""Home Assistant module to handle restoring backups."""

from __future__ import annotations

from collections.abc import Iterable
from dataclasses import dataclass
import hashlib
import json
import logging
from pathlib import Path
@@ -14,7 +18,12 @@ import securetar
from .const import __version__ as HA_VERSION

RESTORE_BACKUP_FILE = ".HA_RESTORE"
-KEEP_PATHS = ("backups",)
+KEEP_BACKUPS = ("backups",)
+KEEP_DATABASE = (
+    "home-assistant_v2.db",
+    "home-assistant_v2.db-wal",
+)


_LOGGER = logging.getLogger(__name__)

@@ -24,6 +33,21 @@ class RestoreBackupFileContent:
    """Definition for restore backup file content."""

    backup_file_path: Path
+    password: str | None
+    remove_after_restore: bool
+    restore_database: bool
+    restore_homeassistant: bool
+
+
+def password_to_key(password: str) -> bytes:
+    """Generate a AES Key from password.
+
+    Matches the implementation in supervisor.backups.utils.password_to_key.
+    """
+    key: bytes = password.encode()
+    for _ in range(100):
+        key = hashlib.sha256(key).digest()
+    return key[:16]
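Note: password_to_key above derives an AES-128 key by hashing the password 100 times with SHA-256 and truncating to 16 bytes. A small usage sketch outside Home Assistant (the password value is an assumed example):

import hashlib


def password_to_key(password: str) -> bytes:
    # Same derivation as above: 100 rounds of SHA-256, truncated to 16 bytes.
    key: bytes = password.encode()
    for _ in range(100):
        key = hashlib.sha256(key).digest()
    return key[:16]


key = password_to_key("correct horse battery staple")  # assumed example password
assert len(key) == 16  # AES-128 key size, matching SecureTarFile's `key=` argument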
def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
@@ -32,20 +56,27 @@ def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent |
    try:
        instruction_content = json.loads(instruction_path.read_text(encoding="utf-8"))
        return RestoreBackupFileContent(
-            backup_file_path=Path(instruction_content["path"])
+            backup_file_path=Path(instruction_content["path"]),
+            password=instruction_content["password"],
+            remove_after_restore=instruction_content["remove_after_restore"],
+            restore_database=instruction_content["restore_database"],
+            restore_homeassistant=instruction_content["restore_homeassistant"],
        )
-    except (FileNotFoundError, json.JSONDecodeError):
+    except (FileNotFoundError, KeyError, json.JSONDecodeError):
        return None
+    finally:
+        # Always remove the backup instruction file to prevent a boot loop
+        instruction_path.unlink(missing_ok=True)
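Note: for illustration only, a sketch of writing an instruction file carrying the five fields read above (the file name comes from RESTORE_BACKUP_FILE; all values are assumed examples, not part of the change):

import json
from pathlib import Path

# Assumed example payload; keys mirror the RestoreBackupFileContent fields parsed above.
instruction = {
    "path": "backups/abc123.tar",
    "password": None,
    "remove_after_restore": False,
    "restore_database": True,
    "restore_homeassistant": True,
}
Path(".HA_RESTORE").write_text(json.dumps(instruction), encoding="utf-8")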
-def _clear_configuration_directory(config_dir: Path) -> None:
-    """Delete all files and directories in the config directory except for the backups directory."""
-    keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS]
-    config_contents = sorted(
-        [entry for entry in config_dir.iterdir() if entry not in keep_paths]
+def _clear_configuration_directory(config_dir: Path, keep: Iterable[str]) -> None:
+    """Delete all files and directories in the config directory except entries in the keep list."""
+    keep_paths = [config_dir.joinpath(path) for path in keep]
+    entries_to_remove = sorted(
+        entry for entry in config_dir.iterdir() if entry not in keep_paths
    )

-    for entry in config_contents:
+    for entry in entries_to_remove:
        entrypath = config_dir.joinpath(entry)

        if entrypath.is_file():
@@ -54,12 +85,15 @@ def _clear_configuration_directory(config_dir: Path) -> None:
            shutil.rmtree(entrypath)


-def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
+def _extract_backup(
+    config_dir: Path,
+    restore_content: RestoreBackupFileContent,
+) -> None:
    """Extract the backup file to the config directory."""
    with (
        TemporaryDirectory() as tempdir,
        securetar.SecureTarFile(
-            backup_file_path,
+            restore_content.backup_file_path,
            gzip=False,
            mode="r",
        ) as ostf,
@@ -88,22 +122,41 @@ def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
                    f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
                ),
                gzip=backup_meta["compressed"],
+                key=password_to_key(restore_content.password)
+                if restore_content.password is not None
+                else None,
                mode="r",
            ) as istf:
-                for member in istf.getmembers():
-                    if member.name == "data":
-                        continue
-                    member.name = member.name.replace("data/", "")
-                _clear_configuration_directory(config_dir)
                istf.extractall(
-                    path=config_dir,
-                    members=[
-                        member
-                        for member in securetar.secure_path(istf)
-                        if member.name != "data"
-                    ],
+                    path=Path(tempdir, "homeassistant"),
+                    members=securetar.secure_path(istf),
                    filter="fully_trusted",
                )
+            if restore_content.restore_homeassistant:
+                keep = list(KEEP_BACKUPS)
+                if not restore_content.restore_database:
+                    keep.extend(KEEP_DATABASE)
+                _clear_configuration_directory(config_dir, keep)
+                shutil.copytree(
+                    Path(tempdir, "homeassistant", "data"),
+                    config_dir,
+                    dirs_exist_ok=True,
+                    ignore=shutil.ignore_patterns(*(keep)),
+                )
+            elif restore_content.restore_database:
+                for entry in KEEP_DATABASE:
+                    entrypath = config_dir / entry
+
+                    if entrypath.is_file():
+                        entrypath.unlink()
+                    elif entrypath.is_dir():
+                        shutil.rmtree(entrypath)
+
+                for entry in KEEP_DATABASE:
+                    shutil.copy(
+                        Path(tempdir, "homeassistant", "data", entry),
+                        config_dir,
+                    )


def restore_backup(config_dir_path: str) -> bool:
@@ -119,8 +172,13 @@ def restore_backup(config_dir_path: str) -> bool:
    backup_file_path = restore_content.backup_file_path
    _LOGGER.info("Restoring %s", backup_file_path)
    try:
-        _extract_backup(config_dir, backup_file_path)
+        _extract_backup(
+            config_dir=config_dir,
+            restore_content=restore_content,
+        )
    except FileNotFoundError as err:
        raise ValueError(f"Backup file {backup_file_path} does not exist") from err
+    if restore_content.remove_after_restore:
+        backup_file_path.unlink(missing_ok=True)
    _LOGGER.info("Restore complete, restarting")
    return True
homeassistant/block_async_io.py

@@ -50,6 +50,12 @@ def _check_sleep_call_allowed(mapped_args: dict[str, Any]) -> bool:
    return False


+def _check_load_verify_locations_call_allowed(mapped_args: dict[str, Any]) -> bool:
+    # If only cadata is passed, we can ignore it
+    kwargs = mapped_args.get("kwargs")
+    return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs)
+
+
@dataclass(slots=True, frozen=True)
class BlockingCall:
    """Class to hold information about a blocking call."""
@@ -158,7 +164,7 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
        original_func=SSLContext.load_verify_locations,
        object=SSLContext,
        function="load_verify_locations",
-        check_allowed=None,
+        check_allowed=_check_load_verify_locations_call_allowed,
        strict=False,
        strict_core=False,
        skip_for_tests=True,
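Note: the new predicate lets SSLContext.load_verify_locations(cadata=...) pass the event-loop blocking-call check, since in-memory PEM data involves no disk I/O, while cafile/capath keyword usage still triggers the warning. A standalone sketch of the predicate's behavior (inputs are assumed examples):

from typing import Any


def _check_load_verify_locations_call_allowed(mapped_args: dict[str, Any]) -> bool:
    # Allowed only when cadata is the sole keyword argument.
    kwargs = mapped_args.get("kwargs")
    return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs)


assert _check_load_verify_locations_call_allowed({"kwargs": {"cadata": "PEM data"}})
assert not _check_load_verify_locations_call_allowed({"kwargs": {"cafile": "ca.pem"}})
assert not _check_load_verify_locations_call_allowed({"kwargs": {}})
assert not _check_load_verify_locations_call_allowed({})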
homeassistant/bootstrap.py

@@ -252,6 +252,7 @@ PRELOAD_STORAGE = [
    "assist_pipeline.pipelines",
    "core.analytics",
    "auth_module.totp",
+    "backup",
]
homeassistant/brands/slide.json (new file, 5 lines)
@@ -0,0 +1,5 @@
{
  "domain": "slide",
  "name": "Slide",
  "integrations": ["slide", "slide_local"]
}
homeassistant/components/abode/light.py

@@ -11,6 +11,8 @@ from homeassistant.components.light import (
    ATTR_BRIGHTNESS,
    ATTR_COLOR_TEMP_KELVIN,
    ATTR_HS_COLOR,
+    DEFAULT_MAX_KELVIN,
+    DEFAULT_MIN_KELVIN,
    ColorMode,
    LightEntity,
)
@@ -40,6 +42,8 @@ class AbodeLight(AbodeDevice, LightEntity):

    _device: Light
    _attr_name = None
+    _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN
+    _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN

    def turn_on(self, **kwargs: Any) -> None:
        """Turn on the light."""
homeassistant/components/acaia/manifest.json

@@ -25,5 +25,6 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["aioacaia"],
+  "quality_scale": "platinum",
  "requirements": ["aioacaia==0.1.11"]
}
homeassistant/components/acaia/quality_scale.yaml

@@ -16,7 +16,7 @@ rules:
      No custom actions are defined.
  docs-high-level-description: done
  docs-installation-instructions: done
-  docs-removal-instructions: todo
+  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
homeassistant/components/aemet/__init__.py

@@ -1,6 +1,7 @@
"""The AEMET OpenData component."""

import logging
+import shutil

from aemet_opendata.exceptions import AemetError, TownNotFound
from aemet_opendata.interface import AEMET, ConnectionOptions, UpdateFeature
@@ -10,8 +11,9 @@ from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CON
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client
+from homeassistant.helpers.storage import STORAGE_DIR

-from .const import CONF_STATION_UPDATES, PLATFORMS
+from .const import CONF_STATION_UPDATES, DOMAIN, PLATFORMS
from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator

_LOGGER = logging.getLogger(__name__)
@@ -29,6 +31,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo

    options = ConnectionOptions(api_key, update_features)
    aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options)
+    aemet.set_api_data_dir(hass.config.path(STORAGE_DIR, f"{DOMAIN}-{entry.unique_id}"))

    try:
        await aemet.select_coordinates(latitude, longitude)
    except TownNotFound as err:
@@ -57,3 +61,11 @@ async def async_update_options(hass: HomeAssistant, entry: ConfigEntry) -> None:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
+
+
+async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
+    """Remove a config entry."""
+    await hass.async_add_executor_job(
+        shutil.rmtree,
+        hass.config.path(STORAGE_DIR, f"{DOMAIN}-{entry.unique_id}"),
+    )
homeassistant/components/aemet/manifest.json

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/aemet",
  "iot_class": "cloud_polling",
  "loggers": ["aemet_opendata"],
-  "requirements": ["AEMET-OpenData==0.6.3"]
+  "requirements": ["AEMET-OpenData==0.6.4"]
}
@@ -31,7 +31,9 @@ rules:
  # Silver
  action-exceptions: todo
  config-entry-unloading: done
-  docs-configuration-parameters: todo
+  docs-configuration-parameters:
+    status: exempt
+    comment: No options to configure
  docs-installation-parameters: todo
  entity-unavailable: done
  integration-owner: done
@@ -41,12 +43,16 @@ rules:
    status: exempt
    comment: |
      This integration does not require authentication.
-  test-coverage: done
+  test-coverage: todo
  # Gold
  devices: done
  diagnostics: done
-  discovery-update-info: done
-  discovery: done
+  discovery-update-info:
+    status: todo
+    comment: DHCP is still possible
+  discovery:
+    status: todo
+    comment: DHCP is still possible
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
homeassistant/components/alexa/capabilities.py

@@ -317,6 +317,7 @@ class Alexa(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -403,6 +404,7 @@ class AlexaPowerController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -469,6 +471,7 @@ class AlexaLockController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -523,6 +526,7 @@ class AlexaSceneController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -562,6 +566,7 @@ class AlexaBrightnessController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -611,6 +616,7 @@ class AlexaColorController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -669,6 +675,7 @@ class AlexaColorTemperatureController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -715,6 +722,7 @@ class AlexaSpeaker(AlexaCapability):
        "fr-FR",  # Not documented as of 2021-12-04, see PR #60489
        "it-IT",
        "ja-JP",
+        "nl-NL",
    }

    def name(self) -> str:
@@ -772,6 +780,7 @@ class AlexaStepSpeaker(AlexaCapability):
        "es-ES",
        "fr-FR",  # Not documented as of 2021-12-04, see PR #60489
        "it-IT",
+        "nl-NL",
    }

    def name(self) -> str:
@@ -801,6 +810,7 @@ class AlexaPlaybackController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -859,6 +869,7 @@ class AlexaInputController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -1104,6 +1115,7 @@ class AlexaThermostatController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -1245,6 +1257,7 @@ class AlexaPowerLevelController(AlexaCapability):
        "fr-CA",
        "fr-FR",
        "it-IT",
+        "nl-NL",
        "ja-JP",
    }
@@ -1723,6 +1736,7 @@ class AlexaRangeController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -2066,6 +2080,7 @@ class AlexaToggleController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -2212,6 +2227,7 @@ class AlexaPlaybackStateReporter(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -2267,6 +2283,7 @@ class AlexaSeekController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -2360,6 +2377,7 @@ class AlexaEqualizerController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
@@ -2470,6 +2488,7 @@ class AlexaCameraStreamController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }
homeassistant/components/alexa/const.py

@@ -59,6 +59,7 @@ CONF_SUPPORTED_LOCALES = (
    "hi-IN",
    "it-IT",
    "ja-JP",
+    "nl-NL",
    "pt-BR",
)
homeassistant/components/alexa/handlers.py

@@ -376,14 +376,14 @@ async def async_api_decrease_color_temp(
) -> AlexaResponse:
    """Process a decrease color temperature request."""
    entity = directive.entity
-    current = int(entity.attributes[light.ATTR_COLOR_TEMP])
-    max_mireds = int(entity.attributes[light.ATTR_MAX_MIREDS])
+    current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN])
+    min_kelvin = int(entity.attributes[light.ATTR_MIN_COLOR_TEMP_KELVIN])

-    value = min(max_mireds, current + 50)
+    value = max(min_kelvin, current - 500)
    await hass.services.async_call(
        entity.domain,
        SERVICE_TURN_ON,
-        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value},
+        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value},
        blocking=False,
        context=context,
    )
@@ -400,14 +400,14 @@ async def async_api_increase_color_temp(
) -> AlexaResponse:
    """Process an increase color temperature request."""
    entity = directive.entity
-    current = int(entity.attributes[light.ATTR_COLOR_TEMP])
-    min_mireds = int(entity.attributes[light.ATTR_MIN_MIREDS])
+    current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN])
+    max_kelvin = int(entity.attributes[light.ATTR_MAX_COLOR_TEMP_KELVIN])

-    value = max(min_mireds, current - 50)
+    value = min(max_kelvin, current + 500)
    await hass.services.async_call(
        entity.domain,
        SERVICE_TURN_ON,
-        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value},
+        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value},
        blocking=False,
        context=context,
    )
@@ -527,6 +527,7 @@ async def async_api_unlock(
        "hi-IN",
        "it-IT",
        "ja-JP",
+        "nl-NL",
        "pt-BR",
    }:
        msg = (
|
||||
}
|
||||
},
|
||||
"enable_motion_recording": {
|
||||
"name": "Enables motion recording",
|
||||
"name": "Enable motion recording",
|
||||
"description": "Enables recording a clip to camera storage when motion is detected.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
@ -51,8 +51,8 @@
|
||||
}
|
||||
},
|
||||
"disable_motion_recording": {
|
||||
"name": "Disables motion recording",
|
||||
"description": "Disable recording a clip to camera storage when motion is detected.",
|
||||
"name": "Disable motion recording",
|
||||
"description": "Disables recording a clip to camera storage when motion is detected.",
|
||||
"fields": {
|
||||
"entity_id": {
|
||||
"name": "[%key:component::amcrest::services::enable_recording::fields::entity_id::name%]",
|
||||
|
@ -11,12 +11,7 @@ from python_homeassistant_analytics import (
|
||||
from python_homeassistant_analytics.models import IntegrationType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import (
|
||||
@ -25,6 +20,7 @@ from homeassistant.helpers.selector import (
|
||||
SelectSelectorConfig,
|
||||
)
|
||||
|
||||
from . import AnalyticsInsightsConfigEntry
|
||||
from .const import (
|
||||
CONF_TRACKED_ADDONS,
|
||||
CONF_TRACKED_CUSTOM_INTEGRATIONS,
|
||||
@ -46,7 +42,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
config_entry: AnalyticsInsightsConfigEntry,
|
||||
) -> HomeassistantAnalyticsOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return HomeassistantAnalyticsOptionsFlowHandler()
|
||||
|
100
homeassistant/components/analytics_insights/quality_scale.yaml
Normal file
100
homeassistant/components/analytics_insights/quality_scale.yaml
Normal file
@ -0,0 +1,100 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
docs-high-level-description: todo
|
||||
docs-installation-instructions: todo
|
||||
docs-removal-instructions: todo
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
Entities of this integration does not explicitly subscribe to events.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide actions.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable:
|
||||
status: done
|
||||
comment: |
|
||||
The coordinator handles this.
|
||||
integration-owner: done
|
||||
log-when-unavailable:
|
||||
status: done
|
||||
comment: |
|
||||
The coordinator handles this.
|
||||
parallel-updates: todo
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not require authentication.
|
||||
test-coverage: todo
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration is a cloud service and thus does not support discovery.
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration is a cloud service and thus does not support discovery.
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration has a fixed single service.
|
||||
entity-category: done
|
||||
entity-device-class:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not have entities with device classes.
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: done
|
||||
reconfiguration-flow:
|
||||
status: exempt
|
||||
comment: All the options of this integration are managed via the options flow
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration doesn't have any cases where raising an issue is needed.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration has a fixed single service.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: done
|
homeassistant/components/androidtv/__init__.py

@@ -135,15 +135,16 @@ async def async_connect_androidtv(
    )

    aftv = await async_androidtv_setup(
-        config[CONF_HOST],
-        config[CONF_PORT],
-        adbkey,
-        config.get(CONF_ADB_SERVER_IP),
-        config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT),
-        state_detection_rules,
-        config[CONF_DEVICE_CLASS],
-        timeout,
-        signer,
+        host=config[CONF_HOST],
+        port=config[CONF_PORT],
+        adbkey=adbkey,
+        adb_server_ip=config.get(CONF_ADB_SERVER_IP),
+        adb_server_port=config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT),
+        state_detection_rules=state_detection_rules,
+        device_class=config[CONF_DEVICE_CLASS],
+        auth_timeout_s=timeout,
+        signer=signer,
        log_errors=False,
    )

    if not aftv.available:
homeassistant/components/aosmith/manifest.json

@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/aosmith",
  "iot_class": "cloud_polling",
-  "requirements": ["py-aosmith==1.0.11"]
+  "requirements": ["py-aosmith==1.0.12"]
}
homeassistant/components/apsystems/number.py

@@ -20,7 +20,7 @@ async def async_setup_entry(
) -> None:
    """Set up the sensor platform."""

-    add_entities([ApSystemsMaxOutputNumber(config_entry.runtime_data)])
+    add_entities([ApSystemsMaxOutputNumber(config_entry.runtime_data)], True)


class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity):
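Note: the trailing True is the update_before_add flag of Home Assistant's AddEntitiesCallback, requesting a first state refresh before the entity is added. An equivalent keyword-argument form for illustration (a sketch, not the actual change):

# Sketch: the positional True above corresponds to this keyword form.
add_entities(
    [ApSystemsMaxOutputNumber(config_entry.runtime_data)],
    update_before_add=True,
)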
homeassistant/components/assist_pipeline/pipeline.py

@@ -16,6 +16,7 @@ import time
from typing import Any, Literal, cast
import wave

+import hass_nabucasa
import voluptuous as vol

from homeassistant.components import (
@@ -29,6 +30,7 @@ from homeassistant.components import (
from homeassistant.components.tts import (
    generate_media_source_id as tts_generate_media_source_id,
)
+from homeassistant.const import MATCH_ALL
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import intent
@@ -917,6 +919,11 @@ class PipelineRun:
            )
        except (asyncio.CancelledError, TimeoutError):
            raise  # expected
+        except hass_nabucasa.auth.Unauthenticated as src_error:
+            raise SpeechToTextError(
+                code="cloud-auth-failed",
+                message="Home Assistant Cloud authentication failed",
+            ) from src_error
        except Exception as src_error:
            _LOGGER.exception("Unexpected error during speech-to-text")
            raise SpeechToTextError(
@@ -1009,12 +1016,19 @@ class PipelineRun:
        if self.intent_agent is None:
            raise RuntimeError("Recognize intent was not prepared")

+        if self.pipeline.conversation_language == MATCH_ALL:
+            # LLMs support all languages ('*') so use pipeline language for
+            # intent fallback.
+            input_language = self.pipeline.language
+        else:
+            input_language = self.pipeline.conversation_language
+
        self.process_event(
            PipelineEvent(
                PipelineEventType.INTENT_START,
                {
                    "engine": self.intent_agent,
-                    "language": self.pipeline.conversation_language,
+                    "language": input_language,
                    "intent_input": intent_input,
                    "conversation_id": conversation_id,
                    "device_id": device_id,
@@ -1029,7 +1043,7 @@ class PipelineRun:
            context=self.context,
            conversation_id=conversation_id,
            device_id=device_id,
-            language=self.pipeline.language,
+            language=input_language,
            agent_id=self.intent_agent,
        )
        processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT
homeassistant/components/assist_pipeline/vad.py

@@ -140,7 +140,7 @@ class VoiceCommandSegmenter:

        self._timeout_seconds_left -= chunk_seconds
        if self._timeout_seconds_left <= 0:
-            _LOGGER.warning(
+            _LOGGER.debug(
                "VAD end of speech detection timed out after %s seconds",
                self.timeout_seconds,
            )
homeassistant/components/august/manifest.json

@@ -28,5 +28,5 @@
  "documentation": "https://www.home-assistant.io/integrations/august",
  "iot_class": "cloud_push",
  "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.2"]
+  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.6"]
}
homeassistant/components/axis/manifest.json

@@ -29,7 +29,7 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["axis"],
-  "requirements": ["axis==63"],
+  "requirements": ["axis==64"],
  "ssdp": [
    {
      "manufacturer": "AXIS"
homeassistant/components/backup/__init__.py

@@ -5,36 +5,81 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.typing import ConfigType

-from .const import DATA_MANAGER, DOMAIN, LOGGER
+from .agent import (
+    BackupAgent,
+    BackupAgentError,
+    BackupAgentPlatformProtocol,
+    LocalBackupAgent,
+)
+from .const import DATA_MANAGER, DOMAIN
from .http import async_register_http_views
-from .manager import BackupManager
+from .manager import (
+    BackupManager,
+    BackupPlatformProtocol,
+    BackupReaderWriter,
+    CoreBackupReaderWriter,
+    CreateBackupEvent,
+    ManagerBackup,
+    NewBackup,
+    WrittenBackup,
+)
+from .models import AddonInfo, AgentBackup, Folder
from .websocket import async_register_websocket_handlers

+__all__ = [
+    "AddonInfo",
+    "AgentBackup",
+    "ManagerBackup",
+    "BackupAgent",
+    "BackupAgentError",
+    "BackupAgentPlatformProtocol",
+    "BackupPlatformProtocol",
+    "BackupReaderWriter",
+    "CreateBackupEvent",
+    "Folder",
+    "LocalBackupAgent",
+    "NewBackup",
+    "WrittenBackup",
+]
+
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Backup integration."""
-    backup_manager = BackupManager(hass)
-    hass.data[DATA_MANAGER] = backup_manager
-
    with_hassio = is_hassio(hass)

+    reader_writer: BackupReaderWriter
+    if not with_hassio:
+        reader_writer = CoreBackupReaderWriter(hass)
+    else:
+        # pylint: disable-next=import-outside-toplevel, hass-component-root-import
+        from homeassistant.components.hassio.backup import SupervisorBackupReaderWriter
+
+        reader_writer = SupervisorBackupReaderWriter(hass)
+
+    backup_manager = BackupManager(hass, reader_writer)
+    hass.data[DATA_MANAGER] = backup_manager
+    await backup_manager.async_setup()
+
    async_register_websocket_handlers(hass, with_hassio)

-    if with_hassio:
-        if DOMAIN in config:
-            LOGGER.error(
-                "The backup integration is not supported on this installation method, "
-                "please remove it from your configuration"
-            )
-        return True
-
    async def async_handle_create_service(call: ServiceCall) -> None:
        """Service handler for creating backups."""
-        await backup_manager.async_create_backup()
+        agent_id = list(backup_manager.local_backup_agents)[0]
+        await backup_manager.async_create_backup(
+            agent_ids=[agent_id],
+            include_addons=None,
+            include_all_addons=False,
+            include_database=True,
+            include_folders=None,
+            include_homeassistant=True,
+            name=None,
+            password=None,
+        )

-    hass.services.async_register(DOMAIN, "create", async_handle_create_service)
+    if not with_hassio:
+        hass.services.async_register(DOMAIN, "create", async_handle_create_service)

    async_register_http_views(hass)
121
homeassistant/components/backup/agent.py
Normal file
@ -0,0 +1,121 @@
"""Backup agents for the Backup integration."""

from __future__ import annotations

import abc
from collections.abc import AsyncIterator, Callable, Coroutine
from pathlib import Path
from typing import Any, Protocol

from propcache import cached_property

from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError

from .models import AgentBackup


class BackupAgentError(HomeAssistantError):
    """Base class for backup agent errors."""


class BackupAgentUnreachableError(BackupAgentError):
    """Raised when the agent can't reach its API."""

    _message = "The backup agent is unreachable."


class BackupAgent(abc.ABC):
    """Backup agent interface."""

    domain: str
    name: str

    @cached_property
    def agent_id(self) -> str:
        """Return the agent_id."""
        return f"{self.domain}.{self.name}"

    @abc.abstractmethod
    async def async_download_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AsyncIterator[bytes]:
        """Download a backup file.

        :param backup_id: The ID of the backup that was returned in async_list_backups.
        :return: An async iterator that yields bytes.
        """

    @abc.abstractmethod
    async def async_upload_backup(
        self,
        *,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        backup: AgentBackup,
        **kwargs: Any,
    ) -> None:
        """Upload a backup.

        :param open_stream: A function returning an async iterator that yields bytes.
        :param backup: Metadata about the backup that should be uploaded.
        """

    @abc.abstractmethod
    async def async_delete_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> None:
        """Delete a backup file.

        :param backup_id: The ID of the backup that was returned in async_list_backups.
        """

    @abc.abstractmethod
    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List backups."""

    @abc.abstractmethod
    async def async_get_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AgentBackup | None:
        """Return a backup."""


class LocalBackupAgent(BackupAgent):
    """Local backup agent."""

    @abc.abstractmethod
    def get_backup_path(self, backup_id: str) -> Path:
        """Return the local path to a backup.

        The method should return the path to the backup file with the specified id.
        """


class BackupAgentPlatformProtocol(Protocol):
    """Define the format of backup platforms which implement backup agents."""

    async def async_get_backup_agents(
        self,
        hass: HomeAssistant,
        **kwargs: Any,
    ) -> list[BackupAgent]:
        """Return a list of backup agents."""

    @callback
    def async_register_backup_agents_listener(
        self,
        hass: HomeAssistant,
        *,
        listener: Callable[[], None],
        **kwargs: Any,
    ) -> Callable[[], None]:
        """Register a listener to be called when agents are added or removed.

        :return: A function to unregister the listener.
        """
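agent.py is purely an interface module; concrete agents subclass BackupAgent. As an orientation sketch only — the InMemoryBackupAgent class and the "example" domain below are hypothetical, while BackupAgent and AgentBackup are real exports of this integration — a minimal in-memory agent could look like this:

from collections.abc import AsyncIterator, Callable, Coroutine
from typing import Any

from homeassistant.components.backup import AgentBackup, BackupAgent


class InMemoryBackupAgent(BackupAgent):
    """Hypothetical agent that keeps uploaded backups in memory."""

    domain = "example"  # hypothetical integration domain
    name = "memory"  # agent_id becomes "example.memory" via the cached property

    def __init__(self) -> None:
        """Initialize with an empty backup store."""
        self._backups: dict[str, tuple[AgentBackup, bytes]] = {}

    async def async_download_backup(
        self, backup_id: str, **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Yield the stored archive as a single chunk."""

        async def _stream() -> AsyncIterator[bytes]:
            yield self._backups[backup_id][1]

        return _stream()

    async def async_upload_backup(
        self,
        *,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        backup: AgentBackup,
        **kwargs: Any,
    ) -> None:
        """Drain the stream and remember metadata plus payload."""
        stream = await open_stream()
        self._backups[backup.backup_id] = (
            backup,
            b"".join([chunk async for chunk in stream]),
        )

    async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None:
        """Forget a stored backup."""
        self._backups.pop(backup_id, None)

    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """Return metadata for all stored backups."""
        return [backup for backup, _ in self._backups.values()]

    async def async_get_backup(
        self, backup_id: str, **kwargs: Any
    ) -> AgentBackup | None:
        """Return metadata for a single backup, if known."""
        backup_and_data = self._backups.get(backup_id)
        return backup_and_data[0] if backup_and_data else None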
125
homeassistant/components/backup/backup.py
Normal file
@ -0,0 +1,125 @@
"""Local backup support for Core and Container installations."""

from __future__ import annotations

from collections.abc import AsyncIterator, Callable, Coroutine
import json
from pathlib import Path
from tarfile import TarError
from typing import Any

from homeassistant.core import HomeAssistant
from homeassistant.helpers.hassio import is_hassio

from .agent import BackupAgent, LocalBackupAgent
from .const import DOMAIN, LOGGER
from .models import AgentBackup
from .util import read_backup


async def async_get_backup_agents(
    hass: HomeAssistant,
    **kwargs: Any,
) -> list[BackupAgent]:
    """Return the local backup agent."""
    if is_hassio(hass):
        return []
    return [CoreLocalBackupAgent(hass)]


class CoreLocalBackupAgent(LocalBackupAgent):
    """Local backup agent for Core and Container installations."""

    domain = DOMAIN
    name = "local"

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the backup agent."""
        super().__init__()
        self._hass = hass
        self._backup_dir = Path(hass.config.path("backups"))
        self._backups: dict[str, AgentBackup] = {}
        self._loaded_backups = False

    async def _load_backups(self) -> None:
        """Load data of stored backup files."""
        backups = await self._hass.async_add_executor_job(self._read_backups)
        LOGGER.debug("Loaded %s local backups", len(backups))
        self._backups = backups
        self._loaded_backups = True

    def _read_backups(self) -> dict[str, AgentBackup]:
        """Read backups from disk."""
        backups: dict[str, AgentBackup] = {}
        for backup_path in self._backup_dir.glob("*.tar"):
            try:
                backup = read_backup(backup_path)
                backups[backup.backup_id] = backup
            except (OSError, TarError, json.JSONDecodeError, KeyError) as err:
                LOGGER.warning("Unable to read backup %s: %s", backup_path, err)
        return backups

    async def async_download_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AsyncIterator[bytes]:
        """Download a backup file."""
        raise NotImplementedError

    async def async_upload_backup(
        self,
        *,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        backup: AgentBackup,
        **kwargs: Any,
    ) -> None:
        """Upload a backup."""
        self._backups[backup.backup_id] = backup

    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List backups."""
        if not self._loaded_backups:
            await self._load_backups()
        return list(self._backups.values())

    async def async_get_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AgentBackup | None:
        """Return a backup."""
        if not self._loaded_backups:
            await self._load_backups()

        if not (backup := self._backups.get(backup_id)):
            return None

        backup_path = self.get_backup_path(backup_id)
        if not await self._hass.async_add_executor_job(backup_path.exists):
            LOGGER.debug(
                (
                    "Removing tracked backup (%s) that does not exist on the expected"
                    " path %s"
                ),
                backup.backup_id,
                backup_path,
            )
            self._backups.pop(backup_id)
            return None

        return backup

    def get_backup_path(self, backup_id: str) -> Path:
        """Return the local path to a backup."""
        return self._backup_dir / f"{backup_id}.tar"

    async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None:
        """Delete a backup file."""
        if await self.async_get_backup(backup_id) is None:
            return

        backup_path = self.get_backup_path(backup_id)
        await self._hass.async_add_executor_job(backup_path.unlink, True)
        LOGGER.debug("Deleted backup located at %s", backup_path)
        self._backups.pop(backup_id)
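Worth noting: this agent is purely filesystem-backed — ids map one-to-one to tar file names, and async_delete_backup passes True through to Path.unlink as missing_ok. A tiny sketch of the id-to-path convention (the directory is hypothetical; a real install resolves it via hass.config.path("backups")):

from pathlib import Path

backup_dir = Path("/config/backups")  # stand-in for hass.config.path("backups")
backup_id = "abc123"
# get_backup_path() implements exactly this mapping:
assert backup_dir / f"{backup_id}.tar" == Path("/config/backups/abc123.tar")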
473
homeassistant/components/backup/config.py
Normal file
@ -0,0 +1,473 @@
"""Provide persistent configuration for the backup integration."""

from __future__ import annotations

import asyncio
from collections.abc import Callable
from dataclasses import dataclass, field, replace
from datetime import datetime, timedelta
from enum import StrEnum
from typing import TYPE_CHECKING, Self, TypedDict

from cronsim import CronSim

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.event import async_call_later, async_track_point_in_time
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.util import dt as dt_util

from .const import LOGGER
from .models import Folder

if TYPE_CHECKING:
    from .manager import BackupManager, ManagerBackup

# The time of the automatic backup event should be compatible with
# the time of the recorder's nightly job which runs at 04:12.
# Run the backup at 04:45.
CRON_PATTERN_DAILY = "45 4 * * *"
CRON_PATTERN_WEEKLY = "45 4 * * {}"


class StoredBackupConfig(TypedDict):
    """Represent the stored backup config."""

    create_backup: StoredCreateBackupConfig
    last_attempted_automatic_backup: str | None
    last_completed_automatic_backup: str | None
    retention: StoredRetentionConfig
    schedule: StoredBackupSchedule


@dataclass(kw_only=True)
class BackupConfigData:
    """Represent loaded backup config data."""

    create_backup: CreateBackupConfig
    last_attempted_automatic_backup: datetime | None = None
    last_completed_automatic_backup: datetime | None = None
    retention: RetentionConfig
    schedule: BackupSchedule

    @classmethod
    def from_dict(cls, data: StoredBackupConfig) -> Self:
        """Initialize backup config data from a dict."""
        include_folders_data = data["create_backup"]["include_folders"]
        if include_folders_data:
            include_folders = [Folder(folder) for folder in include_folders_data]
        else:
            include_folders = None
        retention = data["retention"]

        if last_attempted_str := data["last_attempted_automatic_backup"]:
            last_attempted = dt_util.parse_datetime(last_attempted_str)
        else:
            last_attempted = None

        if last_completed_str := data["last_completed_automatic_backup"]:
            last_completed = dt_util.parse_datetime(last_completed_str)
        else:
            last_completed = None

        return cls(
            create_backup=CreateBackupConfig(
                agent_ids=data["create_backup"]["agent_ids"],
                include_addons=data["create_backup"]["include_addons"],
                include_all_addons=data["create_backup"]["include_all_addons"],
                include_database=data["create_backup"]["include_database"],
                include_folders=include_folders,
                name=data["create_backup"]["name"],
                password=data["create_backup"]["password"],
            ),
            last_attempted_automatic_backup=last_attempted,
            last_completed_automatic_backup=last_completed,
            retention=RetentionConfig(
                copies=retention["copies"],
                days=retention["days"],
            ),
            schedule=BackupSchedule(state=ScheduleState(data["schedule"]["state"])),
        )

    def to_dict(self) -> StoredBackupConfig:
        """Convert backup config data to a dict."""
        if self.last_attempted_automatic_backup:
            last_attempted = self.last_attempted_automatic_backup.isoformat()
        else:
            last_attempted = None

        if self.last_completed_automatic_backup:
            last_completed = self.last_completed_automatic_backup.isoformat()
        else:
            last_completed = None

        return StoredBackupConfig(
            create_backup=self.create_backup.to_dict(),
            last_attempted_automatic_backup=last_attempted,
            last_completed_automatic_backup=last_completed,
            retention=self.retention.to_dict(),
            schedule=self.schedule.to_dict(),
        )


class BackupConfig:
    """Handle backup config."""

    def __init__(self, hass: HomeAssistant, manager: BackupManager) -> None:
        """Initialize backup config."""
        self.data = BackupConfigData(
            create_backup=CreateBackupConfig(),
            retention=RetentionConfig(),
            schedule=BackupSchedule(),
        )
        self._manager = manager

    def load(self, stored_config: StoredBackupConfig) -> None:
        """Load config."""
        self.data = BackupConfigData.from_dict(stored_config)
        self.data.schedule.apply(self._manager)

    async def update(
        self,
        *,
        create_backup: CreateBackupParametersDict | UndefinedType = UNDEFINED,
        retention: RetentionParametersDict | UndefinedType = UNDEFINED,
        schedule: ScheduleState | UndefinedType = UNDEFINED,
    ) -> None:
        """Update config."""
        if create_backup is not UNDEFINED:
            self.data.create_backup = replace(self.data.create_backup, **create_backup)
        if retention is not UNDEFINED:
            new_retention = RetentionConfig(**retention)
            if new_retention != self.data.retention:
                self.data.retention = new_retention
                self.data.retention.apply(self._manager)
        if schedule is not UNDEFINED:
            new_schedule = BackupSchedule(state=schedule)
            if new_schedule.to_dict() != self.data.schedule.to_dict():
                self.data.schedule = new_schedule
                self.data.schedule.apply(self._manager)

        self._manager.store.save()


@dataclass(kw_only=True)
class RetentionConfig:
    """Represent the backup retention configuration."""

    copies: int | None = None
    days: int | None = None

    def apply(self, manager: BackupManager) -> None:
        """Apply backup retention configuration."""
        if self.days is not None:
            self._schedule_next(manager)
        else:
            self._unschedule_next(manager)

    def to_dict(self) -> StoredRetentionConfig:
        """Convert backup retention configuration to a dict."""
        return StoredRetentionConfig(
            copies=self.copies,
            days=self.days,
        )

    @callback
    def _schedule_next(
        self,
        manager: BackupManager,
    ) -> None:
        """Schedule the next delete after days."""
        self._unschedule_next(manager)

        async def _delete_backups(now: datetime) -> None:
            """Delete backups older than days."""
            self._schedule_next(manager)

            def _backups_filter(
                backups: dict[str, ManagerBackup],
            ) -> dict[str, ManagerBackup]:
                """Return backups older than days to delete."""
                # we need to check here since we await before
                # this filter is applied
                if self.days is None:
                    return {}
                now = dt_util.utcnow()
                return {
                    backup_id: backup
                    for backup_id, backup in backups.items()
                    if dt_util.parse_datetime(backup.date, raise_on_error=True)
                    + timedelta(days=self.days)
                    < now
                }

            await _delete_filtered_backups(manager, _backups_filter)

        manager.remove_next_delete_event = async_call_later(
            manager.hass, timedelta(days=1), _delete_backups
        )

    @callback
    def _unschedule_next(self, manager: BackupManager) -> None:
        """Unschedule the next delete after days."""
        if (remove_next_event := manager.remove_next_delete_event) is not None:
            remove_next_event()
            manager.remove_next_delete_event = None
class StoredRetentionConfig(TypedDict):
    """Represent the stored backup retention configuration."""

    copies: int | None
    days: int | None


class RetentionParametersDict(TypedDict, total=False):
    """Represent the parameters for retention."""

    copies: int | None
    days: int | None


class StoredBackupSchedule(TypedDict):
    """Represent the stored backup schedule configuration."""

    state: ScheduleState


class ScheduleState(StrEnum):
    """Represent the schedule state."""

    NEVER = "never"
    DAILY = "daily"
    MONDAY = "mon"
    TUESDAY = "tue"
    WEDNESDAY = "wed"
    THURSDAY = "thu"
    FRIDAY = "fri"
    SATURDAY = "sat"
    SUNDAY = "sun"


@dataclass(kw_only=True)
class BackupSchedule:
    """Represent the backup schedule."""

    state: ScheduleState = ScheduleState.NEVER
    cron_event: CronSim | None = field(init=False, default=None)

    @callback
    def apply(
        self,
        manager: BackupManager,
    ) -> None:
        """Apply a new schedule.

        There are only three possible state types: never, daily, or weekly.
        """
        if self.state is ScheduleState.NEVER:
            self._unschedule_next(manager)
            return

        if self.state is ScheduleState.DAILY:
            self._schedule_next(CRON_PATTERN_DAILY, manager)
        else:
            self._schedule_next(
                CRON_PATTERN_WEEKLY.format(self.state.value),
                manager,
            )

    @callback
    def _schedule_next(
        self,
        cron_pattern: str,
        manager: BackupManager,
    ) -> None:
        """Schedule the next backup."""
        self._unschedule_next(manager)
        now = dt_util.now()
        if (cron_event := self.cron_event) is None:
            seed_time = manager.config.data.last_completed_automatic_backup or now
            cron_event = self.cron_event = CronSim(cron_pattern, seed_time)
        next_time = next(cron_event)

        if next_time < now:
            # schedule a backup at next daily time once
            # if we missed the last scheduled backup
            cron_event = CronSim(CRON_PATTERN_DAILY, now)
            next_time = next(cron_event)
            # reseed the cron event attribute
            # add a day to the next time to avoid scheduling at the same time again
            self.cron_event = CronSim(cron_pattern, now + timedelta(days=1))

        async def _create_backup(now: datetime) -> None:
            """Create backup."""
            manager.remove_next_backup_event = None
            config_data = manager.config.data
            self._schedule_next(cron_pattern, manager)

            # create the backup
            try:
                await manager.async_create_backup(
                    agent_ids=config_data.create_backup.agent_ids,
                    include_addons=config_data.create_backup.include_addons,
                    include_all_addons=config_data.create_backup.include_all_addons,
                    include_database=config_data.create_backup.include_database,
                    include_folders=config_data.create_backup.include_folders,
                    include_homeassistant=True,  # always include HA
                    name=config_data.create_backup.name,
                    password=config_data.create_backup.password,
                    with_automatic_settings=True,
                )
            except Exception:  # noqa: BLE001
                # another more specific exception will be added
                # and handled in the future
                LOGGER.exception("Unexpected error creating automatic backup")

        manager.remove_next_backup_event = async_track_point_in_time(
            manager.hass, _create_backup, next_time
        )

    def to_dict(self) -> StoredBackupSchedule:
        """Convert backup schedule to a dict."""
        return StoredBackupSchedule(state=self.state)

    @callback
    def _unschedule_next(self, manager: BackupManager) -> None:
        """Unschedule the next backup."""
        if (remove_next_event := manager.remove_next_backup_event) is not None:
            remove_next_event()
            manager.remove_next_backup_event = None


@dataclass(kw_only=True)
class CreateBackupConfig:
    """Represent the config for async_create_backup."""

    agent_ids: list[str] = field(default_factory=list)
    include_addons: list[str] | None = None
    include_all_addons: bool = False
    include_database: bool = True
    include_folders: list[Folder] | None = None
    name: str | None = None
    password: str | None = None

    def to_dict(self) -> StoredCreateBackupConfig:
        """Convert create backup config to a dict."""
        return {
            "agent_ids": self.agent_ids,
            "include_addons": self.include_addons,
            "include_all_addons": self.include_all_addons,
            "include_database": self.include_database,
            "include_folders": self.include_folders,
            "name": self.name,
            "password": self.password,
        }


class StoredCreateBackupConfig(TypedDict):
    """Represent the stored config for async_create_backup."""

    agent_ids: list[str]
    include_addons: list[str] | None
    include_all_addons: bool
    include_database: bool
    include_folders: list[Folder] | None
    name: str | None
    password: str | None


class CreateBackupParametersDict(TypedDict, total=False):
    """Represent the parameters for async_create_backup."""

    agent_ids: list[str]
    include_addons: list[str] | None
    include_all_addons: bool
    include_database: bool
    include_folders: list[Folder] | None
    name: str | None
    password: str | None


async def _delete_filtered_backups(
    manager: BackupManager,
    backup_filter: Callable[[dict[str, ManagerBackup]], dict[str, ManagerBackup]],
) -> None:
    """Delete backups parsed with a filter.

    :param manager: The backup manager.
    :param backup_filter: A filter that should return the backups to delete.
    """
    backups, get_agent_errors = await manager.async_get_backups()
    if get_agent_errors:
        LOGGER.debug(
            "Error getting backups; continuing anyway: %s",
            get_agent_errors,
        )

    # only delete backups that are created with the saved automatic settings
    backups = {
        backup_id: backup
        for backup_id, backup in backups.items()
        if backup.with_automatic_settings
    }

    LOGGER.debug("Total automatic backups: %s", backups)

    filtered_backups = backup_filter(backups)

    if not filtered_backups:
        return

    # always delete oldest backup first
    filtered_backups = dict(
        sorted(
            filtered_backups.items(),
            key=lambda backup_item: backup_item[1].date,
        )
    )

    if len(filtered_backups) >= len(backups):
        # Never delete the last backup.
        last_backup = filtered_backups.popitem()
        LOGGER.debug("Keeping the last backup: %s", last_backup)

    LOGGER.debug("Backups to delete: %s", filtered_backups)

    if not filtered_backups:
        return

    backup_ids = list(filtered_backups)
    delete_results = await asyncio.gather(
        *(manager.async_delete_backup(backup_id) for backup_id in filtered_backups)
    )
    agent_errors = {
        backup_id: error
        for backup_id, error in zip(backup_ids, delete_results, strict=True)
        if error
    }
    if agent_errors:
        LOGGER.error(
            "Error deleting old copies: %s",
            agent_errors,
        )


async def delete_backups_exceeding_configured_count(manager: BackupManager) -> None:
    """Delete backups exceeding the configured retention count."""

    def _backups_filter(
        backups: dict[str, ManagerBackup],
    ) -> dict[str, ManagerBackup]:
        """Return oldest backups more numerous than copies to delete."""
        # we need to check here since we await before
        # this filter is applied
        if manager.config.data.retention.copies is None:
            return {}
        return dict(
            sorted(
                backups.items(),
                key=lambda backup_item: backup_item[1].date,
            )[: max(len(backups) - manager.config.data.retention.copies, 0)]
        )

    await _delete_filtered_backups(manager, _backups_filter)
@ -10,6 +10,7 @@ from homeassistant.util.hass_dict import HassKey

if TYPE_CHECKING:
    from .manager import BackupManager

BUF_SIZE = 2**20 * 4  # 4MB
DOMAIN = "backup"
DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN)
LOGGER = getLogger(__package__)
@ -22,6 +23,12 @@ EXCLUDE_FROM_BACKUP = [
    "*.log.*",
    "*.log",
    "backups/*.tar",
    "tmp_backups/*.tar",
    "OZW_Log.txt",
    "tts/*",
]

EXCLUDE_DATABASE_FROM_BACKUP = [
    "home-assistant_v2.db",
    "home-assistant_v2.db-wal",
]
@ -8,10 +8,11 @@ from typing import cast

from aiohttp import BodyPartReader
from aiohttp.hdrs import CONTENT_DISPOSITION
from aiohttp.web import FileResponse, Request, Response
from aiohttp.web import FileResponse, Request, Response, StreamResponse

from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import slugify

from .const import DATA_MANAGER
@ -27,30 +28,47 @@ def async_register_http_views(hass: HomeAssistant) -> None:
class DownloadBackupView(HomeAssistantView):
    """Generate backup view."""

    url = "/api/backup/download/{slug}"
    url = "/api/backup/download/{backup_id}"
    name = "api:backup:download"

    async def get(
        self,
        request: Request,
        slug: str,
    ) -> FileResponse | Response:
        backup_id: str,
    ) -> StreamResponse | FileResponse | Response:
        """Download a backup file."""
        if not request["hass_user"].is_admin:
            return Response(status=HTTPStatus.UNAUTHORIZED)
        try:
            agent_id = request.query.getone("agent_id")
        except KeyError:
            return Response(status=HTTPStatus.BAD_REQUEST)

        manager = request.app[KEY_HASS].data[DATA_MANAGER]
        backup = await manager.async_get_backup(slug=slug)
        if agent_id not in manager.backup_agents:
            return Response(status=HTTPStatus.BAD_REQUEST)
        agent = manager.backup_agents[agent_id]
        backup = await agent.async_get_backup(backup_id)

        if backup is None or not backup.path.exists():
        # We don't need to check if the path exists, aiohttp.FileResponse will handle
        # that
        if backup is None:
            return Response(status=HTTPStatus.NOT_FOUND)

        return FileResponse(
            path=backup.path.as_posix(),
            headers={
                CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
            },
        )
        headers = {
            CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
        }
        if agent_id in manager.local_backup_agents:
            local_agent = manager.local_backup_agents[agent_id]
            path = local_agent.get_backup_path(backup_id)
            return FileResponse(path=path.as_posix(), headers=headers)

        stream = await agent.async_download_backup(backup_id)
        response = StreamResponse(status=HTTPStatus.OK, headers=headers)
        await response.prepare(request)
        async for chunk in stream:
            await response.write(chunk)
        return response


class UploadBackupView(HomeAssistantView):
@ -62,15 +80,24 @@ class UploadBackupView(HomeAssistantView):
    @require_admin
    async def post(self, request: Request) -> Response:
        """Upload a backup file."""
        try:
            agent_ids = request.query.getall("agent_id")
        except KeyError:
            return Response(status=HTTPStatus.BAD_REQUEST)
        manager = request.app[KEY_HASS].data[DATA_MANAGER]
        reader = await request.multipart()
        contents = cast(BodyPartReader, await reader.next())

        try:
            await manager.async_receive_backup(contents=contents)
            await manager.async_receive_backup(contents=contents, agent_ids=agent_ids)
        except OSError as err:
            return Response(
                body=f"Can't write backup file {err}",
                body=f"Can't write backup file: {err}",
                status=HTTPStatus.INTERNAL_SERVER_ERROR,
            )
        except HomeAssistantError as err:
            return Response(
                body=f"Can't upload backup file: {err}",
                status=HTTPStatus.INTERNAL_SERVER_ERROR,
            )
        except asyncio.CancelledError:
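The reworked download view now requires an agent_id query parameter and streams remote backups instead of assuming a local file. A hypothetical client-side sketch (host, token, and backup id are placeholders; the URL shape and parameter come from the view above, and "backup.local" is the agent_id of CoreLocalBackupAgent, i.e. domain.name):

import aiohttp


async def download_backup(backup_id: str) -> bytes:
    """Fetch a backup archive through the new endpoint."""
    headers = {"Authorization": "Bearer <access-token>"}  # placeholder token
    url = f"http://homeassistant.local:8123/api/backup/download/{backup_id}"
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.get(url, params={"agent_id": "backup.local"}) as resp:
            resp.raise_for_status()
            return await resp.read()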
File diff suppressed because it is too large
@ -1,11 +1,12 @@
{
  "domain": "backup",
  "name": "Backup",
  "after_dependencies": ["hassio"],
  "codeowners": ["@home-assistant/core"],
  "dependencies": ["http", "websocket_api"],
  "documentation": "https://www.home-assistant.io/integrations/backup",
  "integration_type": "system",
  "iot_class": "calculated",
  "quality_scale": "internal",
  "requirements": ["securetar==2024.11.0"]
  "requirements": ["cronsim==2.6", "securetar==2024.11.0"]
}
69
homeassistant/components/backup/models.py
Normal file
@ -0,0 +1,69 @@
"""Models for the backup integration."""

from __future__ import annotations

from dataclasses import asdict, dataclass
from enum import StrEnum
from typing import Any, Self


@dataclass(frozen=True, kw_only=True)
class AddonInfo:
    """Addon information."""

    name: str
    slug: str
    version: str


class Folder(StrEnum):
    """Folder type."""

    SHARE = "share"
    ADDONS = "addons/local"
    SSL = "ssl"
    MEDIA = "media"


@dataclass(frozen=True, kw_only=True)
class AgentBackup:
    """Base backup class."""

    addons: list[AddonInfo]
    backup_id: str
    date: str
    database_included: bool
    extra_metadata: dict[str, bool | str]
    folders: list[Folder]
    homeassistant_included: bool
    homeassistant_version: str | None  # None if homeassistant_included is False
    name: str
    protected: bool
    size: int

    def as_dict(self) -> dict:
        """Return a dict representation of this backup."""
        return asdict(self)

    def as_frontend_json(self) -> dict:
        """Return a dict representation of this backup for sending to frontend."""
        return {
            key: val for key, val in asdict(self).items() if key != "extra_metadata"
        }

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> Self:
        """Create an instance from a JSON serialization."""
        return cls(
            addons=[AddonInfo(**addon) for addon in data["addons"]],
            backup_id=data["backup_id"],
            date=data["date"],
            database_included=data["database_included"],
            extra_metadata=data["extra_metadata"],
            folders=[Folder(folder) for folder in data["folders"]],
            homeassistant_included=data["homeassistant_included"],
            homeassistant_version=data["homeassistant_version"],
            name=data["name"],
            protected=data["protected"],
            size=data["size"],
        )
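A quick illustration of the model's round-trip guarantees — as_dict/from_dict are inverses, and as_frontend_json strips only the extra metadata. All field values below are made up:

backup = AgentBackup(
    addons=[AddonInfo(name="Example add-on", slug="example", version="1.0.0")],
    backup_id="abc123",
    date="2024-12-01T04:45:00+00:00",
    database_included=True,
    extra_metadata={"with_automatic_settings": True},
    folders=[Folder.MEDIA],
    homeassistant_included=True,
    homeassistant_version="2025.1.0",
    name="Automatic backup",
    protected=False,
    size=1024,
)
assert AgentBackup.from_dict(backup.as_dict()) == backup
assert "extra_metadata" not in backup.as_frontend_json()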
52
homeassistant/components/backup/store.py
Normal file
@ -0,0 +1,52 @@
"""Store backup configuration."""

from __future__ import annotations

from typing import TYPE_CHECKING, TypedDict

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.storage import Store

from .const import DOMAIN

if TYPE_CHECKING:
    from .config import StoredBackupConfig
    from .manager import BackupManager, StoredKnownBackup

STORE_DELAY_SAVE = 30
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1


class StoredBackupData(TypedDict):
    """Represent the stored backup config."""

    backups: list[StoredKnownBackup]
    config: StoredBackupConfig


class BackupStore:
    """Store backup config."""

    def __init__(self, hass: HomeAssistant, manager: BackupManager) -> None:
        """Initialize the backup store."""
        self._hass = hass
        self._manager = manager
        self._store: Store[StoredBackupData] = Store(hass, STORAGE_VERSION, STORAGE_KEY)

    async def load(self) -> StoredBackupData | None:
        """Load the store."""
        return await self._store.async_load()

    @callback
    def save(self) -> None:
        """Save config."""
        self._store.async_delay_save(self._data_to_save, STORE_DELAY_SAVE)

    @callback
    def _data_to_save(self) -> StoredBackupData:
        """Return data to save."""
        return {
            "backups": self._manager.known_backups.to_list(),
            "config": self._manager.config.data.to_dict(),
        }
@ -1,4 +1,14 @@
{
  "issues": {
    "automatic_backup_failed_create": {
      "title": "Automatic backup could not be created",
      "description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
    },
    "automatic_backup_failed_upload_agents": {
      "title": "Automatic backup could not be uploaded to agents",
      "description": "The automatic backup could not be uploaded to agents {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
    }
  },
  "services": {
    "create": {
      "name": "Create backup",
148
homeassistant/components/backup/util.py
Normal file
@ -0,0 +1,148 @@
"""Local backup support for Core and Container installations."""

from __future__ import annotations

import asyncio
from pathlib import Path
from queue import SimpleQueue
import tarfile
from typing import cast

import aiohttp
from securetar import SecureTarFile

from homeassistant.backup_restore import password_to_key
from homeassistant.core import HomeAssistant
from homeassistant.util.json import JsonObjectType, json_loads_object

from .const import BUF_SIZE, LOGGER
from .models import AddonInfo, AgentBackup, Folder


def make_backup_dir(path: Path) -> None:
    """Create a backup directory if it does not exist."""
    path.mkdir(exist_ok=True)


def read_backup(backup_path: Path) -> AgentBackup:
    """Read a backup from disk."""

    with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file:
        if not (data_file := backup_file.extractfile("./backup.json")):
            raise KeyError("backup.json not found in tar file")
        data = json_loads_object(data_file.read())
        addons = [
            AddonInfo(
                name=cast(str, addon["name"]),
                slug=cast(str, addon["slug"]),
                version=cast(str, addon["version"]),
            )
            for addon in cast(list[JsonObjectType], data.get("addons", []))
        ]

        folders = [
            Folder(folder)
            for folder in cast(list[str], data.get("folders", []))
            if folder != "homeassistant"
        ]

        homeassistant_included = False
        homeassistant_version: str | None = None
        database_included = False
        if (
            homeassistant := cast(JsonObjectType, data.get("homeassistant"))
        ) and "version" in homeassistant:
            homeassistant_included = True
            homeassistant_version = cast(str, homeassistant["version"])
            database_included = not cast(
                bool, homeassistant.get("exclude_database", False)
            )

        return AgentBackup(
            addons=addons,
            backup_id=cast(str, data["slug"]),
            database_included=database_included,
            date=cast(str, data["date"]),
            extra_metadata=cast(dict[str, bool | str], data.get("extra", {})),
            folders=folders,
            homeassistant_included=homeassistant_included,
            homeassistant_version=homeassistant_version,
            name=cast(str, data["name"]),
            protected=cast(bool, data.get("protected", False)),
            size=backup_path.stat().st_size,
        )


def validate_password(path: Path, password: str | None) -> bool:
    """Validate the password."""
    with tarfile.open(path, "r:", bufsize=BUF_SIZE) as backup_file:
        compressed = False
        ha_tar_name = "homeassistant.tar"
        try:
            ha_tar = backup_file.extractfile(ha_tar_name)
        except KeyError:
            compressed = True
            ha_tar_name = "homeassistant.tar.gz"
            try:
                ha_tar = backup_file.extractfile(ha_tar_name)
            except KeyError:
                LOGGER.error("No homeassistant.tar or homeassistant.tar.gz found")
                return False
        try:
            with SecureTarFile(
                path,  # Not used
                gzip=compressed,
                key=password_to_key(password) if password is not None else None,
                mode="r",
                fileobj=ha_tar,
            ):
                # If we can read the tar file, the password is correct
                return True
        except tarfile.ReadError:
            LOGGER.debug("Invalid password")
            return False
        except Exception:  # noqa: BLE001
            LOGGER.exception("Unexpected error validating password")
            return False


async def receive_file(
    hass: HomeAssistant, contents: aiohttp.BodyPartReader, path: Path
) -> None:
    """Receive a file from a stream and write it to a file."""
    queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = SimpleQueue()

    def _sync_queue_consumer() -> None:
        with path.open("wb") as file_handle:
            while True:
                if (_chunk_future := queue.get()) is None:
                    break
                _chunk, _future = _chunk_future
                if _future is not None:
                    hass.loop.call_soon_threadsafe(_future.set_result, None)
                file_handle.write(_chunk)

    fut: asyncio.Future[None] | None = None
    try:
        fut = hass.async_add_executor_job(_sync_queue_consumer)
        megabytes_sending = 0
        while chunk := await contents.read_chunk(BUF_SIZE):
            megabytes_sending += 1
            if megabytes_sending % 5 != 0:
                queue.put_nowait((chunk, None))
                continue

            chunk_future = hass.loop.create_future()
            queue.put_nowait((chunk, chunk_future))
            await asyncio.wait(
                (fut, chunk_future),
                return_when=asyncio.FIRST_COMPLETED,
            )
            if fut.done():
                # The executor job failed
                break

        queue.put_nowait(None)  # terminate queue consumer
    finally:
        if fut is not None:
            await fut
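A hedged usage sketch of the two read helpers above, assuming they are imported from homeassistant.components.backup.util and that the tar file exists; the path and password are placeholders:

from pathlib import Path

from homeassistant.components.backup.util import read_backup, validate_password

backup_path = Path("/config/backups/abc123.tar")  # hypothetical location
backup = read_backup(backup_path)
print(backup.name, backup.homeassistant_version, backup.size)
# validate_password() tries to open the inner homeassistant tar with the
# derived key; "hunter2" here is obviously a placeholder.
if backup.protected and not validate_password(backup_path, "hunter2"):
    print("Password does not match this backup")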
@ -7,22 +7,31 @@ import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback

from .config import ScheduleState
from .const import DATA_MANAGER, LOGGER
from .manager import IncorrectPasswordError, ManagerStateEvent
from .models import Folder


@callback
def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) -> None:
    """Register websocket commands."""
    websocket_api.async_register_command(hass, backup_agents_info)

    if with_hassio:
        websocket_api.async_register_command(hass, handle_backup_end)
        websocket_api.async_register_command(hass, handle_backup_start)
        return

    websocket_api.async_register_command(hass, handle_details)
    websocket_api.async_register_command(hass, handle_info)
    websocket_api.async_register_command(hass, handle_create)
    websocket_api.async_register_command(hass, handle_remove)
    websocket_api.async_register_command(hass, handle_create_with_automatic_settings)
    websocket_api.async_register_command(hass, handle_delete)
    websocket_api.async_register_command(hass, handle_restore)
    websocket_api.async_register_command(hass, handle_subscribe_events)

    websocket_api.async_register_command(hass, handle_config_info)
    websocket_api.async_register_command(hass, handle_config_update)


@websocket_api.require_admin
@ -35,12 +44,16 @@ async def handle_info(
) -> None:
    """List all stored backups."""
    manager = hass.data[DATA_MANAGER]
    backups = await manager.async_get_backups()
    backups, agent_errors = await manager.async_get_backups()
    connection.send_result(
        msg["id"],
        {
            "backups": list(backups.values()),
            "backing_up": manager.backing_up,
            "agent_errors": {
                agent_id: str(err) for agent_id, err in agent_errors.items()
            },
            "backups": [backup.as_frontend_json() for backup in backups.values()],
            "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
            "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
        },
    )


@ -49,7 +62,7 @@ async def handle_info(
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/details",
        vol.Required("slug"): str,
        vol.Required("backup_id"): str,
    }
)
@websocket_api.async_response
@ -58,12 +71,17 @@ async def handle_details(
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Get backup details for a specific slug."""
    backup = await hass.data[DATA_MANAGER].async_get_backup(slug=msg["slug"])
    """Get backup details for a specific backup."""
    backup, agent_errors = await hass.data[DATA_MANAGER].async_get_backup(
        msg["backup_id"]
    )
    connection.send_result(
        msg["id"],
        {
            "backup": backup,
            "agent_errors": {
                agent_id: str(err) for agent_id, err in agent_errors.items()
            },
            "backup": backup.as_frontend_json() if backup else None,
        },
    )


@ -71,26 +89,39 @@ async def handle_details(
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/remove",
        vol.Required("slug"): str,
        vol.Required("type"): "backup/delete",
        vol.Required("backup_id"): str,
    }
)
@websocket_api.async_response
async def handle_remove(
async def handle_delete(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Remove a backup."""
    await hass.data[DATA_MANAGER].async_remove_backup(slug=msg["slug"])
    connection.send_result(msg["id"])
    """Delete a backup."""
    agent_errors = await hass.data[DATA_MANAGER].async_delete_backup(msg["backup_id"])
    connection.send_result(
        msg["id"],
        {
            "agent_errors": {
                agent_id: str(err) for agent_id, err in agent_errors.items()
            }
        },
    )


@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/restore",
        vol.Required("slug"): str,
        vol.Required("backup_id"): str,
        vol.Required("agent_id"): str,
        vol.Optional("password"): str,
        vol.Optional("restore_addons"): [str],
        vol.Optional("restore_database", default=True): bool,
        vol.Optional("restore_folders"): [vol.Coerce(Folder)],
        vol.Optional("restore_homeassistant", default=True): bool,
    }
)
@websocket_api.async_response
@ -100,12 +131,36 @@ async def handle_restore(
    msg: dict[str, Any],
) -> None:
    """Restore a backup."""
    await hass.data[DATA_MANAGER].async_restore_backup(msg["slug"])
    connection.send_result(msg["id"])
    try:
        await hass.data[DATA_MANAGER].async_restore_backup(
            msg["backup_id"],
            agent_id=msg["agent_id"],
            password=msg.get("password"),
            restore_addons=msg.get("restore_addons"),
            restore_database=msg["restore_database"],
            restore_folders=msg.get("restore_folders"),
            restore_homeassistant=msg["restore_homeassistant"],
        )
    except IncorrectPasswordError:
        connection.send_error(msg["id"], "password_incorrect", "Incorrect password")
    else:
        connection.send_result(msg["id"])
@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/generate"})
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/generate",
        vol.Required("agent_ids"): [str],
        vol.Optional("include_addons"): [str],
        vol.Optional("include_all_addons", default=False): bool,
        vol.Optional("include_database", default=True): bool,
        vol.Optional("include_folders"): [vol.Coerce(Folder)],
        vol.Optional("include_homeassistant", default=True): bool,
        vol.Optional("name"): str,
        vol.Optional("password"): str,
    }
)
@websocket_api.async_response
async def handle_create(
    hass: HomeAssistant,
@ -113,7 +168,46 @@ async def handle_create(
    msg: dict[str, Any],
) -> None:
    """Generate a backup."""
    backup = await hass.data[DATA_MANAGER].async_create_backup()

    backup = await hass.data[DATA_MANAGER].async_initiate_backup(
        agent_ids=msg["agent_ids"],
        include_addons=msg.get("include_addons"),
        include_all_addons=msg["include_all_addons"],
        include_database=msg["include_database"],
        include_folders=msg.get("include_folders"),
        include_homeassistant=msg["include_homeassistant"],
        name=msg.get("name"),
        password=msg.get("password"),
    )
    connection.send_result(msg["id"], backup)


@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/generate_with_automatic_settings",
    }
)
@websocket_api.async_response
async def handle_create_with_automatic_settings(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Generate a backup with stored settings."""

    config_data = hass.data[DATA_MANAGER].config.data
    backup = await hass.data[DATA_MANAGER].async_initiate_backup(
        agent_ids=config_data.create_backup.agent_ids,
        include_addons=config_data.create_backup.include_addons,
        include_all_addons=config_data.create_backup.include_all_addons,
        include_database=config_data.create_backup.include_database,
        include_folders=config_data.create_backup.include_folders,
        include_homeassistant=True,  # always include HA
        name=config_data.create_backup.name,
        password=config_data.create_backup.password,
        with_automatic_settings=True,
    )
    connection.send_result(msg["id"], backup)


@ -127,7 +221,6 @@ async def handle_backup_start(
) -> None:
    """Backup start notification."""
    manager = hass.data[DATA_MANAGER]
    manager.backing_up = True
    LOGGER.debug("Backup start notification")

    try:
@ -149,7 +242,6 @@ async def handle_backup_end(
) -> None:
    """Backup end notification."""
    manager = hass.data[DATA_MANAGER]
    manager.backing_up = False
    LOGGER.debug("Backup end notification")

    try:
@ -159,3 +251,101 @@ async def handle_backup_end(
        return

    connection.send_result(msg["id"])


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/agents/info"})
@websocket_api.async_response
async def backup_agents_info(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Return backup agents info."""
    manager = hass.data[DATA_MANAGER]
    connection.send_result(
        msg["id"],
        {
            "agents": [{"agent_id": agent_id} for agent_id in manager.backup_agents],
        },
    )


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/config/info"})
@websocket_api.async_response
async def handle_config_info(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Send the stored backup config."""
    manager = hass.data[DATA_MANAGER]
    connection.send_result(
        msg["id"],
        {
            "config": manager.config.data.to_dict(),
        },
    )


@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/config/update",
        vol.Optional("create_backup"): vol.Schema(
            {
                vol.Optional("agent_ids"): vol.All([str], vol.Unique()),
                vol.Optional("include_addons"): vol.Any(
                    vol.All([str], vol.Unique()), None
                ),
                vol.Optional("include_all_addons"): bool,
                vol.Optional("include_database"): bool,
                vol.Optional("include_folders"): vol.Any(
                    vol.All([vol.Coerce(Folder)], vol.Unique()), None
                ),
                vol.Optional("name"): vol.Any(str, None),
                vol.Optional("password"): vol.Any(str, None),
            },
        ),
        vol.Optional("retention"): vol.Schema(
            {
                vol.Optional("copies"): vol.Any(int, None),
                vol.Optional("days"): vol.Any(int, None),
            },
        ),
        vol.Optional("schedule"): vol.All(str, vol.Coerce(ScheduleState)),
    }
)
@websocket_api.async_response
async def handle_config_update(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Update the stored backup config."""
    manager = hass.data[DATA_MANAGER]
    changes = dict(msg)
    changes.pop("id")
    changes.pop("type")
    await manager.config.update(**changes)
    connection.send_result(msg["id"])


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Subscribe to backup events."""

    def on_event(event: ManagerStateEvent) -> None:
        connection.send_message(websocket_api.event_message(msg["id"], event))

    manager = hass.data[DATA_MANAGER]
    on_event(manager.last_event)
    connection.subscriptions[msg["id"]] = manager.async_subscribe_events(on_event)
    connection.send_result(msg["id"])
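For reference, rough sketches of the new command payloads as a frontend or custom client might send them; the ids and agent ids are placeholders, while the keys follow the voluptuous schemas above:

generate = {
    "id": 1,
    "type": "backup/generate",
    "agent_ids": ["backup.local"],  # placeholder agent
    "include_database": True,
    "name": "Before upgrade",
}

update_config = {
    "id": 2,
    "type": "backup/config/update",
    "schedule": "daily",  # coerced to ScheduleState.DAILY
    "retention": {"copies": 3, "days": None},
    "create_backup": {"agent_ids": ["backup.local"], "password": None},
}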
@ -137,7 +137,7 @@ VALID_MEDIA_TYPES: Final[tuple] = (
# Fallback sources to use in case of API failure.
FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
    items=[
        Source(  # type: ignore[call-arg]
        Source(
            id="uriStreamer",
            is_enabled=True,
            is_playable=True,
@ -145,7 +145,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
            type=SourceTypeEnum(value="uriStreamer"),
            is_seekable=False,
        ),
        Source(  # type: ignore[call-arg]
        Source(
            id="bluetooth",
            is_enabled=True,
            is_playable=True,
@ -153,7 +153,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
            type=SourceTypeEnum(value="bluetooth"),
            is_seekable=False,
        ),
        Source(  # type: ignore[call-arg]
        Source(
            id="spotify",
            is_enabled=True,
            is_playable=True,
@ -161,7 +161,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
            type=SourceTypeEnum(value="spotify"),
            is_seekable=True,
        ),
        Source(  # type: ignore[call-arg]
        Source(
            id="lineIn",
            is_enabled=True,
            is_playable=True,
@ -169,7 +169,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
            type=SourceTypeEnum(value="lineIn"),
            is_seekable=False,
        ),
        Source(  # type: ignore[call-arg]
        Source(
            id="spdif",
            is_enabled=True,
            is_playable=True,
@ -177,7 +177,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
            type=SourceTypeEnum(value="spdif"),
            is_seekable=False,
        ),
        Source(  # type: ignore[call-arg]
        Source(
            id="netRadio",
            is_enabled=True,
            is_playable=True,
@ -185,7 +185,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
            type=SourceTypeEnum(value="netRadio"),
            is_seekable=False,
        ),
        Source(  # type: ignore[call-arg]
        Source(
            id="deezer",
            is_enabled=True,
            is_playable=True,
@ -193,7 +193,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
            type=SourceTypeEnum(value="deezer"),
            is_seekable=True,
        ),
        Source(  # type: ignore[call-arg]
        Source(
            id="tidalConnect",
            is_enabled=True,
            is_playable=True,
@ -42,7 +42,7 @@ class BangOlufsenBase:

        # Objects that get directly updated by notifications.
        self._playback_metadata: PlaybackContentMetadata = PlaybackContentMetadata()
        self._playback_progress: PlaybackProgress = PlaybackProgress(total_duration=0)  # type: ignore[call-arg]
        self._playback_progress: PlaybackProgress = PlaybackProgress(total_duration=0)
        self._playback_source: Source = Source()
        self._playback_state: RenderingState = RenderingState()
        self._source_change: Source = Source()
@ -210,9 +210,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
# Misc. variables.
|
||||
self._audio_sources: dict[str, str] = {}
|
||||
self._media_image: Art = Art()
|
||||
self._software_status: SoftwareUpdateStatus = SoftwareUpdateStatus( # type: ignore[call-arg]
|
||||
self._software_status: SoftwareUpdateStatus = SoftwareUpdateStatus(
|
||||
software_version="",
|
||||
state=SoftwareUpdateState(seconds_remaining=0, value="idle"), # type: ignore[call-arg]
|
||||
state=SoftwareUpdateState(seconds_remaining=0, value="idle"),
|
||||
)
|
||||
self._sources: dict[str, str] = {}
|
||||
self._state: str = MediaPlayerState.IDLE
|
||||
@ -896,9 +896,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
|
||||
elif media_type == BangOlufsenMediaType.RADIO:
|
||||
await self._client.run_provided_scene(
|
||||
scene_properties=SceneProperties( # type: ignore[call-arg]
|
||||
scene_properties=SceneProperties(
|
||||
action_list=[
|
||||
Action( # type: ignore[call-arg]
|
||||
Action(
|
||||
type="radio",
|
||||
radio_station_id=media_id,
|
||||
)
|
||||
@ -919,7 +919,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
deezer_id = kwargs[ATTR_MEDIA_EXTRA]["id"]
|
||||
|
||||
await self._client.start_deezer_flow(
|
||||
user_flow=UserFlow(user_id=deezer_id) # type: ignore[call-arg]
|
||||
user_flow=UserFlow(user_id=deezer_id)
|
||||
)
|
||||
|
||||
# Play a playlist or album.
|
||||
@ -929,7 +929,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
start_from = kwargs[ATTR_MEDIA_EXTRA]["start_from"]
|
||||
|
||||
await self._client.add_to_queue(
|
||||
play_queue_item=PlayQueueItem( # type: ignore[call-arg]
|
||||
play_queue_item=PlayQueueItem(
|
||||
provider=PlayQueueItemType(value=media_type),
|
||||
start_now_from_position=start_from,
|
||||
type="playlist",
|
||||
@ -940,7 +940,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
# Play a track.
|
||||
else:
|
||||
await self._client.add_to_queue(
|
||||
play_queue_item=PlayQueueItem( # type: ignore[call-arg]
|
||||
play_queue_item=PlayQueueItem(
|
||||
provider=PlayQueueItemType(value=media_type),
|
||||
start_now_from_position=0,
|
||||
type="track",
|
||||
|
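Note: the dropped `# type: ignore[call-arg]` suppressions above all follow the same pattern; they were presumably only needed while the generated mozart-api models lacked defaults for optional fields. A minimal sketch of the idea, using a hypothetical stand-in model rather than the real mozart-api class:

from dataclasses import dataclass

@dataclass
class FakePlaybackProgress:
    # assumption: the updated models give every field a default
    total_duration: int | None = None
    progress: int | None = None

# Before the update this call tripped mypy's call-arg check;
# with defaults on all fields it type-checks without an ignore.
progress = FakePlaybackProgress(total_duration=0)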
@ -5,7 +5,7 @@ from __future__ import annotations
import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_DEVICE_ID, CONF_PIN
from homeassistant.const import CONF_PIN
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
@ -13,11 +13,6 @@ from homeassistant.helpers import config_validation as cv
from .const import ATTR_CONFIG_ENTRY_ID, DOMAIN, SERVICE_SEND_PIN
from .coordinator import BlinkConfigEntry

SERVICE_UPDATE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]),
}
)
SERVICE_SEND_PIN_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY_ID): vol.All(cv.ensure_list, [cv.string]),
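Note: the `vol.All(cv.ensure_list, [cv.string])` validator kept in the remaining schema first normalizes a bare string into a list, then validates each element as a string. A small self-contained usage sketch:

import voluptuous as vol
from homeassistant.helpers import config_validation as cv

schema = vol.Schema(vol.All(cv.ensure_list, [cv.string]))
assert schema("abc") == ["abc"]          # single value is wrapped into a list
assert schema(["a", "b"]) == ["a", "b"]  # lists pass through element-wise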
@ -84,16 +84,16 @@
}
},
"send_pin": {
"name": "Send pin",
"description": "Sends a new PIN to blink for 2FA.",
"name": "Send PIN",
"description": "Sends a new PIN to Blink for 2FA.",
"fields": {
"pin": {
"name": "Pin",
"description": "PIN received from blink. Leave empty if you only received a verification email."
"name": "PIN",
"description": "PIN received from Blink. Leave empty if you only received a verification email."
},
"config_entry_id": {
"name": "Integration ID",
"description": "The Blink Integration id."
"description": "The Blink Integration ID."
}
}
}
@ -14,7 +14,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN
from .services import setup_services

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

@ -36,7 +35,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Bluesound."""
if DOMAIN not in hass.data:
hass.data[DOMAIN] = []
setup_services(hass)

return True

@ -6,7 +6,7 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/bluesound",
"iot_class": "local_polling",
"requirements": ["pyblu==1.0.4"],
"requirements": ["pyblu==2.0.0"],
"zeroconf": [
{
"type": "_musc._tcp.local."
@ -28,18 +28,26 @@ from homeassistant.const import CONF_HOST, CONF_HOSTS, CONF_NAME, CONF_PORT
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers import (
config_validation as cv,
entity_platform,
issue_registry as ir,
)
from homeassistant.helpers.device_registry import (
CONNECTION_NETWORK_MAC,
DeviceInfo,
format_mac,
)
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
import homeassistant.util.dt as dt_util

from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN, INTEGRATION_TITLE
from .utils import format_unique_id
from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id

if TYPE_CHECKING:
from . import BluesoundConfigEntry
@ -51,6 +59,11 @@ SCAN_INTERVAL = timedelta(minutes=15)
DATA_BLUESOUND = DOMAIN
DEFAULT_PORT = 11000

SERVICE_CLEAR_TIMER = "clear_sleep_timer"
SERVICE_JOIN = "join"
SERVICE_SET_TIMER = "set_sleep_timer"
SERVICE_UNJOIN = "unjoin"

NODE_OFFLINE_CHECK_TIMEOUT = 180
NODE_RETRY_INITIATION = timedelta(minutes=3)

@ -130,6 +143,18 @@ async def async_setup_entry(
config_entry.runtime_data.sync_status,
)

platform = entity_platform.async_get_current_platform()
platform.async_register_entity_service(
SERVICE_SET_TIMER, None, "async_increase_timer"
)
platform.async_register_entity_service(
SERVICE_CLEAR_TIMER, None, "async_clear_timer"
)
platform.async_register_entity_service(
SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_join"
)
platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin")

hass.data[DATA_BLUESOUND].append(bluesound_player)
async_add_entities([bluesound_player], update_before_add=True)
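Note: `async_register_entity_service` binds a service name to a method on each targeted entity, which is what lets the old hand-rolled services.py (deleted further down) go away. After registration, a call routes straight to the entity method; a usage sketch with hypothetical entity ids:

# Calling the entity service invokes async_join on the targeted entity:
await hass.services.async_call(
    "media_player",
    "join",
    {"entity_id": "media_player.living_room", "master": "media_player.kitchen"},
    blocking=True,
)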
@ -175,13 +200,12 @@ class BluesoundPlayer(MediaPlayerEntity):
self._status: Status | None = None
self._inputs: list[Input] = []
self._presets: list[Preset] = []
self._muted = False
self._master: BluesoundPlayer | None = None
self._is_master = False
self._group_name: str | None = None
self._group_list: list[str] = []
self._bluesound_device_name = sync_status.name
self._player = player
self._is_leader = False
self._leader: BluesoundPlayer | None = None

self._attr_unique_id = format_unique_id(sync_status.mac, port)
# there should always be one player with the default port per mac
@ -250,6 +274,22 @@ class BluesoundPlayer(MediaPlayerEntity):
name=f"bluesound.poll_sync_status_loop_{self.host}:{self.port}",
)

assert self._sync_status.id is not None
self.async_on_remove(
async_dispatcher_connect(
self.hass,
dispatcher_join_signal(self.entity_id),
self.async_add_follower,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
dispatcher_unjoin_signal(self._sync_status.id),
self.async_remove_follower,
)
)

async def async_will_remove_from_hass(self) -> None:
"""Stop the polling task."""
await super().async_will_remove_from_hass()
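Note: wrapping each `async_dispatcher_connect` in `self.async_on_remove(...)` ties the subscription to the entity lifecycle, since the connect call returns an unsubscribe callback that Home Assistant then runs automatically on removal. The pattern in isolation:

# Inside an entity's async_added_to_hass (sketch, hypothetical signal name):
unsub = async_dispatcher_connect(self.hass, "some_signal", self._handle)
self.async_on_remove(unsub)  # disconnects when the entity is removed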
@ -317,25 +357,25 @@ class BluesoundPlayer(MediaPlayerEntity):

self._group_list = self.rebuild_bluesound_group()

if sync_status.master is not None:
self._is_master = False
master_id = f"{sync_status.master.ip}:{sync_status.master.port}"
master_device = [
if sync_status.leader is not None:
self._is_leader = False
leader_id = f"{sync_status.leader.ip}:{sync_status.leader.port}"
leader_device = [
device
for device in self.hass.data[DATA_BLUESOUND]
if device.id == master_id
if device.id == leader_id
]

if master_device and master_id != self.id:
self._master = master_device[0]
if leader_device and leader_id != self.id:
self._leader = leader_device[0]
else:
self._master = None
_LOGGER.error("Master not found %s", master_id)
self._leader = None
_LOGGER.error("Leader not found %s", leader_id)
else:
if self._master is not None:
self._master = None
slaves = self._sync_status.slaves
self._is_master = slaves is not None
if self._leader is not None:
self._leader = None
followers = self._sync_status.followers
self._is_leader = followers is not None

self.async_write_ha_state()

@ -355,7 +395,7 @@ class BluesoundPlayer(MediaPlayerEntity):
if self._status is None:
return MediaPlayerState.OFF

if self.is_grouped and not self.is_master:
if self.is_grouped and not self.is_leader:
return MediaPlayerState.IDLE

match self._status.state:
@ -369,7 +409,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def media_title(self) -> str | None:
"""Title of current playing media."""
if self._status is None or (self.is_grouped and not self.is_master):
if self._status is None or (self.is_grouped and not self.is_leader):
return None

return self._status.name
@ -380,7 +420,7 @@ class BluesoundPlayer(MediaPlayerEntity):
if self._status is None:
return None

if self.is_grouped and not self.is_master:
if self.is_grouped and not self.is_leader:
return self._group_name

return self._status.artist
@ -388,7 +428,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def media_album_name(self) -> str | None:
"""Artist of current playing media (Music track only)."""
if self._status is None or (self.is_grouped and not self.is_master):
if self._status is None or (self.is_grouped and not self.is_leader):
return None

return self._status.album
@ -396,7 +436,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def media_image_url(self) -> str | None:
"""Image url of current playing media."""
if self._status is None or (self.is_grouped and not self.is_master):
if self._status is None or (self.is_grouped and not self.is_leader):
return None

url = self._status.image
@ -411,7 +451,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def media_position(self) -> int | None:
"""Position of current playing media in seconds."""
if self._status is None or (self.is_grouped and not self.is_master):
if self._status is None or (self.is_grouped and not self.is_leader):
return None

mediastate = self.state
@ -430,7 +470,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def media_duration(self) -> int | None:
"""Duration of current playing media in seconds."""
if self._status is None or (self.is_grouped and not self.is_master):
if self._status is None or (self.is_grouped and not self.is_leader):
return None

duration = self._status.total_seconds
@ -489,7 +529,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def source_list(self) -> list[str] | None:
"""List of available input sources."""
if self._status is None or (self.is_grouped and not self.is_master):
if self._status is None or (self.is_grouped and not self.is_leader):
return None

sources = [x.text for x in self._inputs]
@ -500,7 +540,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def source(self) -> str | None:
"""Name of the current input source."""
if self._status is None or (self.is_grouped and not self.is_master):
if self._status is None or (self.is_grouped and not self.is_leader):
return None

if self._status.input_id is not None:
@ -520,7 +560,7 @@ class BluesoundPlayer(MediaPlayerEntity):
if self._status is None:
return MediaPlayerEntityFeature(0)

if self.is_grouped and not self.is_master:
if self.is_grouped and not self.is_leader:
return (
MediaPlayerEntityFeature.VOLUME_STEP
| MediaPlayerEntityFeature.VOLUME_SET
@ -560,14 +600,17 @@ class BluesoundPlayer(MediaPlayerEntity):
return supported

@property
def is_master(self) -> bool:
"""Return true if player is a coordinator."""
return self._is_master
def is_leader(self) -> bool:
"""Return true if player is leader of a group."""
return self._sync_status.followers is not None

@property
def is_grouped(self) -> bool:
"""Return true if player is a coordinator."""
return self._master is not None or self._is_master
"""Return true if player is member or leader of a group."""
return (
self._sync_status.followers is not None
or self._sync_status.leader is not None
)

@property
def shuffle(self) -> bool:
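Note: with this change the grouping state is read straight off the latest sync status instead of the `_is_master`/`_master` bookkeeping: a node leads when it reports followers, and is grouped when it has either followers or a leader. A self-contained truth-table sketch of that derivation:

def is_leader(followers, leader) -> bool:
    return followers is not None

def is_grouped(followers, leader) -> bool:
    return followers is not None or leader is not None

# standalone player: is_leader(None, None) -> False, is_grouped(None, None) -> False
# group leader:      is_leader(["f1"], None) -> True, is_grouped(["f1"], None) -> True
# group follower:    is_leader(None, "ldr") -> False, is_grouped(None, "ldr") -> True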
@ -580,25 +623,25 @@ class BluesoundPlayer(MediaPlayerEntity):

async def async_join(self, master: str) -> None:
"""Join the player to a group."""
master_device = [
device
for device in self.hass.data[DATA_BLUESOUND]
if device.entity_id == master
]
if master == self.entity_id:
raise ServiceValidationError("Cannot join player to itself")

if len(master_device) > 0:
if self.id == master_device[0].id:
raise ServiceValidationError("Cannot join player to itself")
_LOGGER.debug("Trying to join player: %s", self.id)
async_dispatcher_send(
self.hass, dispatcher_join_signal(master), self.host, self.port
)

_LOGGER.debug(
"Trying to join player: %s to master: %s",
self.id,
master_device[0].id,
)
async def async_unjoin(self) -> None:
"""Unjoin the player from a group."""
if self._sync_status.leader is None:
return

await master_device[0].async_add_slave(self)
else:
_LOGGER.error("Master not found %s", master_device)
leader_id = f"{self._sync_status.leader.ip}:{self._sync_status.leader.port}"

_LOGGER.debug("Trying to unjoin player: %s", self.id)
async_dispatcher_send(
self.hass, dispatcher_unjoin_signal(leader_id), self.host, self.port
)

@property
def extra_state_attributes(self) -> dict[str, Any] | None:
@ -607,31 +650,31 @@ class BluesoundPlayer(MediaPlayerEntity):
if self._group_list:
attributes = {ATTR_BLUESOUND_GROUP: self._group_list}

attributes[ATTR_MASTER] = self._is_master
attributes[ATTR_MASTER] = self.is_leader

return attributes

def rebuild_bluesound_group(self) -> list[str]:
"""Rebuild the list of entities in speaker group."""
if self.sync_status.master is None and self.sync_status.slaves is None:
if self.sync_status.leader is None and self.sync_status.followers is None:
return []

player_entities: list[BluesoundPlayer] = self.hass.data[DATA_BLUESOUND]

leader_sync_status: SyncStatus | None = None
if self.sync_status.master is None:
if self.sync_status.leader is None:
leader_sync_status = self.sync_status
else:
required_id = f"{self.sync_status.master.ip}:{self.sync_status.master.port}"
required_id = f"{self.sync_status.leader.ip}:{self.sync_status.leader.port}"
for x in player_entities:
if x.sync_status.id == required_id:
leader_sync_status = x.sync_status
break

if leader_sync_status is None or leader_sync_status.slaves is None:
if leader_sync_status is None or leader_sync_status.followers is None:
return []

follower_ids = [f"{x.ip}:{x.port}" for x in leader_sync_status.slaves]
follower_ids = [f"{x.ip}:{x.port}" for x in leader_sync_status.followers]
follower_names = [
x.sync_status.name
for x in player_entities
@ -640,21 +683,13 @@ class BluesoundPlayer(MediaPlayerEntity):
follower_names.insert(0, leader_sync_status.name)
return follower_names

async def async_unjoin(self) -> None:
"""Unjoin the player from a group."""
if self._master is None:
return
async def async_add_follower(self, host: str, port: int) -> None:
"""Add follower to leader."""
await self._player.add_follower(host, port)

_LOGGER.debug("Trying to unjoin player: %s", self.id)
await self._master.async_remove_slave(self)

async def async_add_slave(self, slave_device: BluesoundPlayer) -> None:
"""Add slave to master."""
await self._player.add_slave(slave_device.host, slave_device.port)

async def async_remove_slave(self, slave_device: BluesoundPlayer) -> None:
"""Remove slave to master."""
await self._player.remove_slave(slave_device.host, slave_device.port)
async def async_remove_follower(self, host: str, port: int) -> None:
"""Remove follower to leader."""
await self._player.remove_follower(host, port)

async def async_increase_timer(self) -> int:
"""Increase sleep time on player."""
@ -672,7 +707,7 @@ class BluesoundPlayer(MediaPlayerEntity):

async def async_select_source(self, source: str) -> None:
"""Select input source."""
if self.is_grouped and not self.is_master:
if self.is_grouped and not self.is_leader:
return

# presets and inputs might have the same name; presets have priority
@ -691,49 +726,49 @@ class BluesoundPlayer(MediaPlayerEntity):

async def async_clear_playlist(self) -> None:
"""Clear players playlist."""
if self.is_grouped and not self.is_master:
if self.is_grouped and not self.is_leader:
return

await self._player.clear()

async def async_media_next_track(self) -> None:
"""Send media_next command to media player."""
if self.is_grouped and not self.is_master:
if self.is_grouped and not self.is_leader:
return

await self._player.skip()

async def async_media_previous_track(self) -> None:
"""Send media_previous command to media player."""
if self.is_grouped and not self.is_master:
if self.is_grouped and not self.is_leader:
return

await self._player.back()

async def async_media_play(self) -> None:
"""Send media_play command to media player."""
if self.is_grouped and not self.is_master:
if self.is_grouped and not self.is_leader:
return

await self._player.play()

async def async_media_pause(self) -> None:
"""Send media_pause command to media player."""
if self.is_grouped and not self.is_master:
if self.is_grouped and not self.is_leader:
return

await self._player.pause()

async def async_media_stop(self) -> None:
"""Send stop command."""
if self.is_grouped and not self.is_master:
if self.is_grouped and not self.is_leader:
return

await self._player.stop()

async def async_media_seek(self, position: float) -> None:
"""Send media_seek command to media player."""
if self.is_grouped and not self.is_master:
if self.is_grouped and not self.is_leader:
return

await self._player.play(seek=int(position))
@ -742,7 +777,7 @@ class BluesoundPlayer(MediaPlayerEntity):
self, media_type: MediaType | str, media_id: str, **kwargs: Any
) -> None:
"""Send the play_media command to the media player."""
if self.is_grouped and not self.is_master:
if self.is_grouped and not self.is_leader:
return

if media_source.is_media_source_id(media_id):
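Note: join/unjoin now flows through the dispatcher instead of direct references between player objects. The joining entity sends on the join signal keyed to the leader's entity_id; the leader, connected to that signal in async_added_to_hass, receives the host/port and calls into its own pyblu player. A condensed sketch of the round trip, using the calls from the diff above (entity id hypothetical):

# follower side, inside async_join:
async_dispatcher_send(
    self.hass, dispatcher_join_signal("media_player.kitchen"), self.host, self.port
)

# leader side, invoked via the connected signal:
async def async_add_follower(self, host: str, port: int) -> None:
    await self._player.add_follower(host, port)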
@ -1,68 +0,0 @@
"""Support for Bluesound devices."""

from __future__ import annotations

from typing import NamedTuple

import voluptuous as vol

from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import config_validation as cv

from .const import ATTR_MASTER, DOMAIN

SERVICE_CLEAR_TIMER = "clear_sleep_timer"
SERVICE_JOIN = "join"
SERVICE_SET_TIMER = "set_sleep_timer"
SERVICE_UNJOIN = "unjoin"

BS_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids})

BS_JOIN_SCHEMA = BS_SCHEMA.extend({vol.Required(ATTR_MASTER): cv.entity_id})


class ServiceMethodDetails(NamedTuple):
"""Details for SERVICE_TO_METHOD mapping."""

method: str
schema: vol.Schema


SERVICE_TO_METHOD = {
SERVICE_JOIN: ServiceMethodDetails(method="async_join", schema=BS_JOIN_SCHEMA),
SERVICE_UNJOIN: ServiceMethodDetails(method="async_unjoin", schema=BS_SCHEMA),
SERVICE_SET_TIMER: ServiceMethodDetails(
method="async_increase_timer", schema=BS_SCHEMA
),
SERVICE_CLEAR_TIMER: ServiceMethodDetails(
method="async_clear_timer", schema=BS_SCHEMA
),
}


def setup_services(hass: HomeAssistant) -> None:
"""Set up services for Bluesound component."""

async def async_service_handler(service: ServiceCall) -> None:
"""Map services to method of Bluesound devices."""
if not (method := SERVICE_TO_METHOD.get(service.service)):
return

params = {
key: value for key, value in service.data.items() if key != ATTR_ENTITY_ID
}
if entity_ids := service.data.get(ATTR_ENTITY_ID):
target_players = [
player for player in hass.data[DOMAIN] if player.entity_id in entity_ids
]
else:
target_players = hass.data[DOMAIN]

for player in target_players:
await getattr(player, method.method)(**params)

for service, method in SERVICE_TO_METHOD.items():
hass.services.async_register(
DOMAIN, service, async_service_handler, schema=method.schema
)
@ -6,3 +6,16 @@ from homeassistant.helpers.device_registry import format_mac
def format_unique_id(mac: str, port: int) -> str:
"""Generate a unique ID based on the MAC address and port number."""
return f"{format_mac(mac)}-{port}"


def dispatcher_join_signal(entity_id: str) -> str:
"""Join an entity ID with a signal."""
return f"bluesound_join_{entity_id}"


def dispatcher_unjoin_signal(leader_id: str) -> str:
"""Unjoin an entity ID with a signal.

Id is ip_address:port. This can be obtained from sync_status.id.
"""
return f"bluesound_unjoin_{leader_id}"
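Note: these helpers only build signal names, but centralizing them in utils guarantees the sender in media_player.py and the receiver registered during entity setup agree on the exact string. Join keys on the leader's entity_id; unjoin keys on the leader's sync-status id (ip:port). Given the function bodies above, with hypothetical example ids:

assert dispatcher_join_signal("media_player.kitchen") == "bluesound_join_media_player.kitchen"
assert dispatcher_unjoin_signal("192.168.1.10:11000") == "bluesound_unjoin_192.168.1.10:11000"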
@ -6,7 +6,6 @@ import logging

import voluptuous as vol

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_DEVICE_ID, CONF_ENTITY_ID, CONF_NAME, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import (
@ -50,7 +49,7 @@ SERVICE_UPDATE_STATE = "update_state"

@callback
def _async_migrate_options_from_data_if_missing(
hass: HomeAssistant, entry: ConfigEntry
hass: HomeAssistant, entry: BMWConfigEntry
) -> None:
data = dict(entry.data)
options = dict(entry.options)
@ -74,23 +73,29 @@ async def _async_migrate_entries(
@callback
def update_unique_id(entry: er.RegistryEntry) -> dict[str, str] | None:
replacements = {
"charging_level_hv": "fuel_and_battery.remaining_battery_percent",
"fuel_percent": "fuel_and_battery.remaining_fuel_percent",
"ac_current_limit": "charging_profile.ac_current_limit",
"charging_start_time": "fuel_and_battery.charging_start_time",
"charging_end_time": "fuel_and_battery.charging_end_time",
"charging_status": "fuel_and_battery.charging_status",
"charging_target": "fuel_and_battery.charging_target",
"remaining_battery_percent": "fuel_and_battery.remaining_battery_percent",
"remaining_range_total": "fuel_and_battery.remaining_range_total",
"remaining_range_electric": "fuel_and_battery.remaining_range_electric",
"remaining_range_fuel": "fuel_and_battery.remaining_range_fuel",
"remaining_fuel": "fuel_and_battery.remaining_fuel",
"remaining_fuel_percent": "fuel_and_battery.remaining_fuel_percent",
"activity": "climate.activity",
Platform.SENSOR.value: {
"charging_level_hv": "fuel_and_battery.remaining_battery_percent",
"fuel_percent": "fuel_and_battery.remaining_fuel_percent",
"ac_current_limit": "charging_profile.ac_current_limit",
"charging_start_time": "fuel_and_battery.charging_start_time",
"charging_end_time": "fuel_and_battery.charging_end_time",
"charging_status": "fuel_and_battery.charging_status",
"charging_target": "fuel_and_battery.charging_target",
"remaining_battery_percent": "fuel_and_battery.remaining_battery_percent",
"remaining_range_total": "fuel_and_battery.remaining_range_total",
"remaining_range_electric": "fuel_and_battery.remaining_range_electric",
"remaining_range_fuel": "fuel_and_battery.remaining_range_fuel",
"remaining_fuel": "fuel_and_battery.remaining_fuel",
"remaining_fuel_percent": "fuel_and_battery.remaining_fuel_percent",
"activity": "climate.activity",
}
}
if (key := entry.unique_id.split("-")[-1]) in replacements:
new_unique_id = entry.unique_id.replace(key, replacements[key])
if (key := entry.unique_id.split("-")[-1]) in replacements.get(
entry.domain, []
):
new_unique_id = entry.unique_id.replace(
key, replacements[entry.domain][key]
)
_LOGGER.debug(
"Migrating entity '%s' unique_id from '%s' to '%s'",
entry.entity_id,
@ -116,7 +121,7 @@ async def _async_migrate_entries(
return True


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: BMWConfigEntry) -> bool:
"""Set up BMW Connected Drive from a config entry."""

_async_migrate_options_from_data_if_missing(hass, entry)
@ -164,7 +169,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: BMWConfigEntry) -> bool:
"""Unload a config entry."""

return await hass.config_entries.async_unload_platforms(
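Note: scoping the replacement table by platform prevents a sensor key from rewriting a unique_id in another domain that happens to share the same suffix. A reduced, self-contained sketch of the lookup (VIN prefix hypothetical):

replacements = {
    "sensor": {"charging_level_hv": "fuel_and_battery.remaining_battery_percent"},
}

def migrate(domain: str, unique_id: str) -> str | None:
    key = unique_id.split("-")[-1]
    if key in replacements.get(domain, {}):
        return unique_id.replace(key, replacements[domain][key])
    return None  # no migration for this registry entry

assert migrate("sensor", "WBA123-charging_level_hv") == (
    "WBA123-fuel_and_battery.remaining_battery_percent"
)
assert migrate("binary_sensor", "WBA123-charging_level_hv") is None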
@ -16,7 +16,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import BMWConfigEntry
from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry
from .entity import BMWBaseEntity

if TYPE_CHECKING:
@ -55,7 +55,6 @@ BUTTON_TYPES: tuple[BMWButtonEntityDescription, ...] = (
BMWButtonEntityDescription(
key="deactivate_air_conditioning",
translation_key="deactivate_air_conditioning",
name="Deactivate air conditioning",
remote_function=lambda vehicle: vehicle.remote_services.trigger_remote_air_conditioning_stop(),
is_available=lambda vehicle: vehicle.is_remote_climate_stop_enabled,
),
@ -111,6 +110,10 @@ class BMWButton(BMWBaseEntity, ButtonEntity):
try:
await self.entity_description.remote_function(self.vehicle)
except MyBMWAPIError as ex:
raise HomeAssistantError(ex) from ex
raise HomeAssistantError(
translation_domain=BMW_DOMAIN,
translation_key="remote_service_error",
translation_placeholders={"exception": str(ex)},
) from ex

self.coordinator.async_update_listeners()
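Note: the same substitution repeats across the BMW platforms below (lock, notify, number, select, switch): instead of wrapping the raw library exception, the error is raised with a translation key so the frontend can localize the message, while the original exception survives in the placeholders and the `from ex` chain. The shape as a small helper sketch:

from homeassistant.exceptions import HomeAssistantError

def raise_remote_service_error(ex: Exception) -> None:
    """Re-raise a library error as a localizable HomeAssistantError (sketch)."""
    raise HomeAssistantError(
        translation_domain="bmw_connected_drive",
        translation_key="remote_service_error",  # message text lives in strings.json
        translation_placeholders={"exception": str(ex)},
    ) from ex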
@ -18,7 +18,6 @@ import voluptuous as vol
from homeassistant.config_entries import (
SOURCE_REAUTH,
SOURCE_RECONFIGURE,
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
@ -39,6 +38,7 @@ from .const import (
CONF_READ_ONLY,
CONF_REFRESH_TOKEN,
)
from .coordinator import BMWConfigEntry

DATA_SCHEMA = vol.Schema(
{
@ -103,9 +103,10 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN):

VERSION = 1

data: dict[str, Any] = {}

_existing_entry_data: Mapping[str, Any] | None = None
def __init__(self) -> None:
"""Initialize the config flow."""
self.data: dict[str, Any] = {}
self._existing_entry_data: dict[str, Any] = {}

async def async_step_user(
self, user_input: dict[str, Any] | None = None
@ -175,19 +176,15 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Show the change password step."""
existing_data = (
dict(self._existing_entry_data) if self._existing_entry_data else {}
)

if user_input is not None:
return await self.async_step_user(existing_data | user_input)
return await self.async_step_user(self._existing_entry_data | user_input)

return self.async_show_form(
step_id="change_password",
data_schema=RECONFIGURE_SCHEMA,
description_placeholders={
CONF_USERNAME: existing_data[CONF_USERNAME],
CONF_REGION: existing_data[CONF_REGION],
CONF_USERNAME: self._existing_entry_data[CONF_USERNAME],
CONF_REGION: self._existing_entry_data[CONF_REGION],
},
)

@ -195,14 +192,14 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN):
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle configuration by re-auth."""
self._existing_entry_data = entry_data
self._existing_entry_data = dict(entry_data)
return await self.async_step_change_password()

async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a reconfiguration flow initialized by the user."""
self._existing_entry_data = self._get_reconfigure_entry().data
self._existing_entry_data = dict(self._get_reconfigure_entry().data)
return await self.async_step_change_password()

async def async_step_captcha(
@ -224,7 +221,7 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN):
@staticmethod
@callback
def async_get_options_flow(
config_entry: ConfigEntry,
config_entry: BMWConfigEntry,
) -> BMWOptionsFlow:
"""Return a MyBMW option flow."""
return BMWOptionsFlow()
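Note: storing `dict(entry_data)` instead of the Mapping itself matters because the change-password step merges with `self._existing_entry_data | user_input`; the `|` merge operator is defined for dict (right side wins), and the copy also detaches the flow from the config entry's data. Tiny illustration with hypothetical values:

existing = {"username": "user@example.com", "region": "rest_of_world"}
user_input = {"password": "new-secret"}
merged = existing | user_input
# merged == {"username": "user@example.com", "region": "rest_of_world", "password": "new-secret"}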
@ -22,7 +22,13 @@ from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util.ssl import get_default_context

from .const import CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN, DOMAIN, SCAN_INTERVALS
from .const import (
CONF_GCID,
CONF_READ_ONLY,
CONF_REFRESH_TOKEN,
DOMAIN as BMW_DOMAIN,
SCAN_INTERVALS,
)

_LOGGER = logging.getLogger(__name__)

@ -36,7 +42,7 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]):
account: MyBMWAccount
config_entry: BMWConfigEntry

def __init__(self, hass: HomeAssistant, *, config_entry: ConfigEntry) -> None:
def __init__(self, hass: HomeAssistant, *, config_entry: BMWConfigEntry) -> None:
"""Initialize account-wide BMW data updater."""
self.account = MyBMWAccount(
config_entry.data[CONF_USERNAME],
@ -57,7 +63,7 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]):
hass,
_LOGGER,
config_entry=config_entry,
name=f"{DOMAIN}-{config_entry.data[CONF_USERNAME]}",
name=f"{BMW_DOMAIN}-{config_entry.data[CONF_USERNAME]}",
update_interval=timedelta(
seconds=SCAN_INTERVALS[config_entry.data[CONF_REGION]]
),
@ -75,18 +81,29 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]):
except MyBMWCaptchaMissingError as err:
# If a captcha is required (user/password login flow), always trigger the reauth flow
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_domain=BMW_DOMAIN,
translation_key="missing_captcha",
) from err
except MyBMWAuthError as err:
# Allow one retry interval before raising AuthFailed to avoid flaky API issues
if self.last_update_success:
raise UpdateFailed(err) from err
raise UpdateFailed(
translation_domain=BMW_DOMAIN,
translation_key="update_failed",
translation_placeholders={"exception": str(err)},
) from err
# Clear refresh token and trigger reauth if previous update failed as well
self._update_config_entry_refresh_token(None)
raise ConfigEntryAuthFailed(err) from err
raise ConfigEntryAuthFailed(
translation_domain=BMW_DOMAIN,
translation_key="invalid_auth",
) from err
except (MyBMWAPIError, RequestError) as err:
raise UpdateFailed(err) from err
raise UpdateFailed(
translation_domain=BMW_DOMAIN,
translation_key="update_failed",
translation_placeholders={"exception": str(err)},
) from err

if self.account.refresh_token != old_refresh_token:
self._update_config_entry_refresh_token(self.account.refresh_token)
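Note: the two-stage auth handling is preserved by this refactor, only the exception payloads gained translation keys: the first MyBMWAuthError after a healthy update raises UpdateFailed (one ordinary retry), and a repeat failure clears the refresh token and raises ConfigEntryAuthFailed to start reauth. Control-flow sketch:

def on_auth_error(last_update_success: bool) -> str:
    """Return which exception path the coordinator takes (sketch)."""
    if last_update_success:
        return "UpdateFailed"        # transient: retry on the next interval
    return "ConfigEntryAuthFailed"   # persistent: clear token, start reauth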
@ -49,7 +49,7 @@ class BMWDeviceTracker(BMWBaseEntity, TrackerEntity):

_attr_force_update = False
_attr_translation_key = "car"
_attr_icon = "mdi:car"
_attr_name = None

def __init__(
self,
@ -58,9 +58,7 @@ class BMWDeviceTracker(BMWBaseEntity, TrackerEntity):
) -> None:
"""Initialize the Tracker."""
super().__init__(coordinator, vehicle)

self._attr_unique_id = vehicle.vin
self._attr_name = None

@property
def extra_state_attributes(self) -> dict[str, Any]:
@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import BMWConfigEntry
from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry
from .coordinator import BMWDataUpdateCoordinator
from .entity import BMWBaseEntity

@ -70,7 +70,11 @@ class BMWLock(BMWBaseEntity, LockEntity):
# Set the state to unknown if the command fails
self._attr_is_locked = None
self.async_write_ha_state()
raise HomeAssistantError(ex) from ex
raise HomeAssistantError(
translation_domain=BMW_DOMAIN,
translation_key="remote_service_error",
translation_placeholders={"exception": str(ex)},
) from ex
finally:
# Always update the listeners to get the latest state
self.coordinator.async_update_listeners()
@ -90,7 +94,11 @@ class BMWLock(BMWBaseEntity, LockEntity):
# Set the state to unknown if the command fails
self._attr_is_locked = None
self.async_write_ha_state()
raise HomeAssistantError(ex) from ex
raise HomeAssistantError(
translation_domain=BMW_DOMAIN,
translation_key="remote_service_error",
translation_placeholders={"exception": str(ex)},
) from ex
finally:
# Always update the listeners to get the latest state
self.coordinator.async_update_listeners()
@ -20,7 +20,7 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import DOMAIN, BMWConfigEntry
from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry

PARALLEL_UPDATES = 1

@ -92,7 +92,7 @@ class BMWNotificationService(BaseNotificationService):

except (vol.Invalid, TypeError, ValueError) as ex:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_domain=BMW_DOMAIN,
translation_key="invalid_poi",
translation_placeholders={
"poi_exception": str(ex),
@ -106,4 +106,8 @@ class BMWNotificationService(BaseNotificationService):
try:
await vehicle.remote_services.trigger_send_poi(poi)
except MyBMWAPIError as ex:
raise HomeAssistantError(ex) from ex
raise HomeAssistantError(
translation_domain=BMW_DOMAIN,
translation_key="remote_service_error",
translation_placeholders={"exception": str(ex)},
) from ex
@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import BMWConfigEntry
from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry
from .coordinator import BMWDataUpdateCoordinator
from .entity import BMWBaseEntity

@ -109,6 +109,10 @@ class BMWNumber(BMWBaseEntity, NumberEntity):
try:
await self.entity_description.remote_service(self.vehicle, value)
except MyBMWAPIError as ex:
raise HomeAssistantError(ex) from ex
raise HomeAssistantError(
translation_domain=BMW_DOMAIN,
translation_key="remote_service_error",
translation_placeholders={"exception": str(ex)},
) from ex

self.coordinator.async_update_listeners()
@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import BMWConfigEntry
from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry
from .coordinator import BMWDataUpdateCoordinator
from .entity import BMWBaseEntity

@ -123,6 +123,10 @@ class BMWSelect(BMWBaseEntity, SelectEntity):
try:
await self.entity_description.remote_service(self.vehicle, option)
except MyBMWAPIError as ex:
raise HomeAssistantError(ex) from ex
raise HomeAssistantError(
translation_domain=BMW_DOMAIN,
translation_key="remote_service_error",
translation_placeholders={"exception": str(ex)},
) from ex

self.coordinator.async_update_listeners()
@ -2,11 +2,16 @@
"config": {
"step": {
"user": {
"description": "Enter your MyBMW/MINI Connected credentials.",
"description": "Connect to your MyBMW/MINI Connected account to retrieve vehicle data.",
"data": {
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]",
"region": "ConnectedDrive Region"
},
"data_description": {
"username": "The email address of your MyBMW/MINI Connected account.",
"password": "The password of your MyBMW/MINI Connected account.",
"region": "The region of your MyBMW/MINI Connected account."
}
},
"captcha": {
@ -23,6 +28,9 @@
"description": "Update your MyBMW/MINI Connected password for account `{username}` in region `{region}`.",
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::bmw_connected_drive::config::step::user::data_description::password%]"
}
}
},
@ -41,7 +49,10 @@
"step": {
"account_options": {
"data": {
"read_only": "Read-only (only sensors and notify, no execution of services, no lock)"
"read_only": "Read-only mode"
},
"data_description": {
"read_only": "Only retrieve values and send POI data, but don't offer any services that can change the vehicle state."
}
}
}
@ -83,6 +94,9 @@
"activate_air_conditioning": {
"name": "Activate air conditioning"
},
"deactivate_air_conditioning": {
"name": "Deactivate air conditioning"
},
"find_vehicle": {
"name": "Find vehicle"
}
@ -220,6 +234,15 @@
},
"missing_captcha": {
"message": "Login requires captcha validation"
},
"invalid_auth": {
"message": "[%key:common::config_flow::error::invalid_auth%]"
},
"remote_service_error": {
"message": "Error executing remote service on vehicle. {exception}"
},
"update_failed": {
"message": "Error updating vehicle data. {exception}"
}
}
}
@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import BMWConfigEntry
from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry
from .coordinator import BMWDataUpdateCoordinator
from .entity import BMWBaseEntity

@ -111,8 +111,11 @@ class BMWSwitch(BMWBaseEntity, SwitchEntity):
try:
await self.entity_description.remote_service_on(self.vehicle)
except MyBMWAPIError as ex:
raise HomeAssistantError(ex) from ex

raise HomeAssistantError(
translation_domain=BMW_DOMAIN,
translation_key="remote_service_error",
translation_placeholders={"exception": str(ex)},
) from ex
self.coordinator.async_update_listeners()

async def async_turn_off(self, **kwargs: Any) -> None:
@ -120,6 +123,9 @@ class BMWSwitch(BMWBaseEntity, SwitchEntity):
try:
await self.entity_description.remote_service_off(self.vehicle)
except MyBMWAPIError as ex:
raise HomeAssistantError(ex) from ex

raise HomeAssistantError(
translation_domain=BMW_DOMAIN,
translation_key="remote_service_error",
translation_placeholders={"exception": str(ex)},
) from ex
self.coordinator.async_update_listeners()
@ -7,7 +7,7 @@ rules:
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
config-flow: todo
dependency-transparency: done
docs-actions: done
docs-high-level-description: todo
@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/caldav",
"iot_class": "cloud_polling",
"loggers": ["caldav", "vobject"],
"requirements": ["caldav==1.3.9"]
"requirements": ["caldav==1.3.9", "icalendar==6.1.0"]
}
homeassistant/components/cambridge_audio/media_browser.py (new file, 85 lines)
@ -0,0 +1,85 @@
"""Support for media browsing."""

from aiostreammagic import StreamMagicClient
from aiostreammagic.models import Preset

from homeassistant.components.media_player import BrowseMedia, MediaClass
from homeassistant.core import HomeAssistant


async def async_browse_media(
hass: HomeAssistant,
client: StreamMagicClient,
media_content_id: str | None,
media_content_type: str | None,
) -> BrowseMedia:
"""Browse media."""

if media_content_type == "presets":
return await _presets_payload(client.preset_list.presets)

return await _root_payload(
hass,
client,
)


async def _root_payload(
hass: HomeAssistant,
client: StreamMagicClient,
) -> BrowseMedia:
"""Return root payload for Cambridge Audio."""
children: list[BrowseMedia] = []

if client.preset_list.presets:
children.append(
BrowseMedia(
title="Presets",
media_class=MediaClass.DIRECTORY,
media_content_id="",
media_content_type="presets",
thumbnail="https://brands.home-assistant.io/_/cambridge_audio/logo.png",
can_play=False,
can_expand=True,
)
)

return BrowseMedia(
title="Cambridge Audio",
media_class=MediaClass.DIRECTORY,
media_content_id="",
media_content_type="root",
can_play=False,
can_expand=True,
children=children,
)


async def _presets_payload(presets: list[Preset]) -> BrowseMedia:
"""Create payload to list presets."""

children: list[BrowseMedia] = []
for preset in presets:
if preset.state != "OK":
continue
children.append(
BrowseMedia(
title=preset.name,
media_class=MediaClass.MUSIC,
media_content_id=str(preset.preset_id),
media_content_type="preset",
can_play=True,
can_expand=False,
thumbnail=preset.art_url,
)
)

return BrowseMedia(
title="Presets",
media_class=MediaClass.DIRECTORY,
media_content_id="",
media_content_type="presets",
can_play=False,
can_expand=True,
children=children,
)
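Note: the browse tree built here is two levels deep: a root directory whose children hold a single "Presets" directory, which in turn lists playable preset nodes (can_play=True, can_expand=False). The media_content_type strings ("root", "presets", "preset") are the integration's own routing keys, matched again in async_browse_media and play_media. A reduced sketch of such a tree (preset name hypothetical):

from homeassistant.components.media_player import BrowseMedia, MediaClass

preset = BrowseMedia(
    title="Radio 1",                 # hypothetical preset title
    media_class=MediaClass.MUSIC,
    media_content_id="1",
    media_content_type="preset",     # play_media routes on this key
    can_play=True,
    can_expand=False,
)
root = BrowseMedia(
    title="Cambridge Audio",
    media_class=MediaClass.DIRECTORY,
    media_content_id="",
    media_content_type="root",
    can_play=False,
    can_expand=True,
    children=[preset],
)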
@ -13,6 +13,7 @@ from aiostreammagic import (
)

from homeassistant.components.media_player import (
BrowseMedia,
MediaPlayerDeviceClass,
MediaPlayerEntity,
MediaPlayerEntityFeature,
@ -24,7 +25,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import CambridgeAudioConfigEntry
from . import CambridgeAudioConfigEntry, media_browser
from .const import (
CAMBRIDGE_MEDIA_TYPE_AIRABLE,
CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO,
@ -34,7 +35,8 @@ from .const import (
from .entity import CambridgeAudioEntity, command

BASE_FEATURES = (
MediaPlayerEntityFeature.SELECT_SOURCE
MediaPlayerEntityFeature.BROWSE_MEDIA
| MediaPlayerEntityFeature.SELECT_SOURCE
| MediaPlayerEntityFeature.TURN_OFF
| MediaPlayerEntityFeature.TURN_ON
| MediaPlayerEntityFeature.PLAY_MEDIA
@ -338,3 +340,13 @@ class CambridgeAudioDevice(CambridgeAudioEntity, MediaPlayerEntity):

if media_type == CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO:
await self.client.play_radio_url("Radio", media_id)

async def async_browse_media(
self,
media_content_type: MediaType | str | None = None,
media_content_id: str | None = None,
) -> BrowseMedia:
"""Implement the media browsing helper."""
return await media_browser.async_browse_media(
self.hass, self.client, media_content_id, media_content_type
)
homeassistant/components/cloud/backup.py (new file, 228 lines)
@ -0,0 +1,228 @@
"""Backup platform for the cloud integration."""

from __future__ import annotations

import base64
from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
import hashlib
from typing import Any, Self

from aiohttp import ClientError, ClientTimeout, StreamReader
from hass_nabucasa import Cloud, CloudError
from hass_nabucasa.cloud_api import (
async_files_delete_file,
async_files_download_details,
async_files_list,
async_files_upload_details,
)

from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect

from .client import CloudClient
from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT

_STORAGE_BACKUP = "backup"


async def _b64md5(stream: AsyncIterator[bytes]) -> str:
"""Calculate the MD5 hash of a file."""
file_hash = hashlib.md5()
async for chunk in stream:
file_hash.update(chunk)
return base64.b64encode(file_hash.digest()).decode()


async def async_get_backup_agents(
hass: HomeAssistant,
**kwargs: Any,
) -> list[BackupAgent]:
"""Return the cloud backup agent."""
cloud = hass.data[DATA_CLOUD]
if not cloud.is_logged_in:
return []

return [CloudBackupAgent(hass=hass, cloud=cloud)]


@callback
def async_register_backup_agents_listener(
hass: HomeAssistant,
*,
listener: Callable[[], None],
**kwargs: Any,
) -> Callable[[], None]:
"""Register a listener to be called when agents are added or removed."""

@callback
def unsub() -> None:
"""Unsubscribe from events."""
unsub_signal()

@callback
def handle_event(data: Mapping[str, Any]) -> None:
"""Handle event."""
if data["type"] not in ("login", "logout"):
return
listener()

unsub_signal = async_dispatcher_connect(hass, EVENT_CLOUD_EVENT, handle_event)
return unsub


class ChunkAsyncStreamIterator:
"""Async iterator for chunked streams.

Based on aiohttp.streams.ChunkTupleAsyncStreamIterator, but yields
bytes instead of tuple[bytes, bool].
"""

__slots__ = ("_stream",)

def __init__(self, stream: StreamReader) -> None:
"""Initialize."""
self._stream = stream

def __aiter__(self) -> Self:
"""Iterate."""
return self

async def __anext__(self) -> bytes:
"""Yield next chunk."""
rv = await self._stream.readchunk()
if rv == (b"", False):
raise StopAsyncIteration
return rv[0]


class CloudBackupAgent(BackupAgent):
"""Cloud backup agent."""

domain = DOMAIN
name = DOMAIN

def __init__(self, hass: HomeAssistant, cloud: Cloud[CloudClient]) -> None:
"""Initialize the cloud backup sync agent."""
super().__init__()
self._cloud = cloud
self._hass = hass

@callback
def _get_backup_filename(self) -> str:
"""Return the backup filename."""
return f"{self._cloud.client.prefs.instance_id}.tar"

async def async_download_backup(
self,
backup_id: str,
**kwargs: Any,
) -> AsyncIterator[bytes]:
"""Download a backup file.

:param backup_id: The ID of the backup that was returned in async_list_backups.
:return: An async iterator that yields bytes.
"""
if not await self.async_get_backup(backup_id):
raise BackupAgentError("Backup not found")

try:
details = await async_files_download_details(
self._cloud,
storage_type=_STORAGE_BACKUP,
filename=self._get_backup_filename(),
)
except (ClientError, CloudError) as err:
raise BackupAgentError("Failed to get download details") from err

try:
resp = await self._cloud.websession.get(details["url"])
resp.raise_for_status()
except ClientError as err:
raise BackupAgentError("Failed to download backup") from err

return ChunkAsyncStreamIterator(resp.content)

async def async_upload_backup(
self,
*,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
backup: AgentBackup,
**kwargs: Any,
) -> None:
"""Upload a backup.

:param open_stream: A function returning an async iterator that yields bytes.
:param backup: Metadata about the backup that should be uploaded.
"""
if not backup.protected:
raise BackupAgentError("Cloud backups must be protected")

base64md5hash = await _b64md5(await open_stream())

try:
details = await async_files_upload_details(
self._cloud,
storage_type=_STORAGE_BACKUP,
filename=self._get_backup_filename(),
metadata=backup.as_dict(),
size=backup.size,
base64md5hash=base64md5hash,
)
except (ClientError, CloudError) as err:
raise BackupAgentError("Failed to get upload details") from err

try:
upload_status = await self._cloud.websession.put(
details["url"],
data=await open_stream(),
headers=details["headers"] | {"content-length": str(backup.size)},
timeout=ClientTimeout(connect=10.0, total=43200.0), # 43200s == 12h
)
upload_status.raise_for_status()
except (TimeoutError, ClientError) as err:
raise BackupAgentError("Failed to upload backup") from err

async def async_delete_backup(
self,
backup_id: str,
**kwargs: Any,
) -> None:
"""Delete a backup file.

:param backup_id: The ID of the backup that was returned in async_list_backups.
"""
if not await self.async_get_backup(backup_id):
return

try:
await async_files_delete_file(
self._cloud,
storage_type=_STORAGE_BACKUP,
filename=self._get_backup_filename(),
)
except (ClientError, CloudError) as err:
raise BackupAgentError("Failed to delete backup") from err

async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
try:
backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP)
except (ClientError, CloudError) as err:
raise BackupAgentError("Failed to list backups") from err

return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]

async def async_get_backup(
self,
backup_id: str,
**kwargs: Any,
) -> AgentBackup | None:
"""Return a backup."""
backups = await self.async_list_backups()

for backup in backups:
if backup.backup_id == backup_id:
return backup

return None
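Note on the upload path: the cloud files API expects a base64-encoded MD5 of the exact payload, so `_b64md5` consumes one stream purely for hashing and `open_stream()` is then called a second time for the actual PUT; the stream factory therefore has to be restartable. A self-contained sketch of the hash helper's behavior:

import asyncio
import base64
import hashlib

async def _gen():
    # stand-in for a restartable backup stream
    yield b"chunk-1"
    yield b"chunk-2"

async def b64md5(stream) -> str:
    file_hash = hashlib.md5()
    async for chunk in stream:
        file_hash.update(chunk)
    return base64.b64encode(file_hash.digest()).decode()

# asyncio.run(b64md5(_gen())) -> base64 MD5 digest of b"chunk-1chunk-2"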
@ -306,6 +306,7 @@ class CloudClient(Interface):
},
"version": HA_VERSION,
"instance_id": self.prefs.instance_id,
"name": self._hass.config.location_name,
}

async def async_alexa_message(self, payload: dict[Any, Any]) -> dict[Any, Any]:
@ -18,6 +18,8 @@ DATA_CLOUD: HassKey[Cloud[CloudClient]] = HassKey(DOMAIN)
DATA_PLATFORMS_SETUP: HassKey[dict[str, asyncio.Event]] = HassKey(
"cloud_platforms_setup"
)
EVENT_CLOUD_EVENT = "cloud_event"

REQUEST_TIMEOUT = 10

PREF_ENABLE_ALEXA = "alexa_enabled"
@ -88,3 +90,5 @@ DISPATCHER_REMOTE_UPDATE: SignalType[Any] = SignalType("cloud_remote_update")

STT_ENTITY_UNIQUE_ID = "cloud-speech-to-text"
TTS_ENTITY_UNIQUE_ID = "cloud-text-to-speech"

LOGIN_MFA_TIMEOUT = 60
@ -9,6 +9,7 @@ import dataclasses
from functools import wraps
from http import HTTPStatus
import logging
import time
from typing import Any, Concatenate

import aiohttp
@ -31,7 +32,9 @@ from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.util.location import async_detect_location_info

from .alexa_config import entity_supported as entity_supported_by_alexa
@ -39,6 +42,8 @@ from .assist_pipeline import async_create_cloud_pipeline
from .client import CloudClient
from .const import (
    DATA_CLOUD,
    EVENT_CLOUD_EVENT,
    LOGIN_MFA_TIMEOUT,
    PREF_ALEXA_REPORT_STATE,
    PREF_DISABLE_2FA,
    PREF_ENABLE_ALEXA,
@ -69,6 +74,10 @@ _CLOUD_ERRORS: dict[type[Exception], tuple[HTTPStatus, str]] = {
}


class MFAExpiredOrNotStarted(auth.CloudError):
    """Multi-factor authentication expired, or not started."""


@callback
def async_setup(hass: HomeAssistant) -> None:
    """Initialize the HTTP API."""
@ -101,6 +110,11 @@ def async_setup(hass: HomeAssistant) -> None:

    _CLOUD_ERRORS.update(
        {
            auth.InvalidTotpCode: (HTTPStatus.BAD_REQUEST, "Invalid TOTP code."),
            auth.MFARequired: (
                HTTPStatus.UNAUTHORIZED,
                "Multi-factor authentication required.",
            ),
            auth.UserNotFound: (HTTPStatus.BAD_REQUEST, "User does not exist."),
            auth.UserNotConfirmed: (HTTPStatus.BAD_REQUEST, "Email not confirmed."),
            auth.UserExists: (
@ -112,6 +126,10 @@ def async_setup(hass: HomeAssistant) -> None:
                HTTPStatus.BAD_REQUEST,
                "Password change required.",
            ),
            MFAExpiredOrNotStarted: (
                HTTPStatus.BAD_REQUEST,
                "Multi-factor authentication expired, or not started. Please try again.",
            ),
        }
    )

@ -206,24 +224,64 @@ class GoogleActionsSyncView(HomeAssistantView):
class CloudLoginView(HomeAssistantView):
    """Login to Home Assistant cloud."""

    _mfa_tokens: dict[str, str] = {}
    _mfa_tokens_set_time: float = 0

    url = "/api/cloud/login"
    name = "api:cloud:login"

    @require_admin
    @_handle_cloud_errors
    @RequestDataValidator(
        vol.Schema({vol.Required("email"): str, vol.Required("password"): str})
        vol.Schema(
            vol.All(
                {
                    vol.Required("email"): str,
                    vol.Exclusive("password", "login"): str,
                    vol.Exclusive("code", "login"): str,
                },
                cv.has_at_least_one_key("password", "code"),
            )
        )
    )
    async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
        """Handle login request."""
        hass = request.app[KEY_HASS]
        cloud = hass.data[DATA_CLOUD]
        await cloud.login(data["email"], data["password"])

        try:
            email = data["email"]
            password = data.get("password")
            code = data.get("code")

            if email and password:
                await cloud.login(email, password)

            else:
                if (
                    not self._mfa_tokens
                    or time.time() - self._mfa_tokens_set_time > LOGIN_MFA_TIMEOUT
                ):
                    raise MFAExpiredOrNotStarted

                # Voluptuous should ensure that code is not None because password is not set
                assert code is not None

                await cloud.login_verify_totp(email, code, self._mfa_tokens)
                self._mfa_tokens = {}
                self._mfa_tokens_set_time = 0

        except auth.MFARequired as mfa_err:
            self._mfa_tokens = mfa_err.mfa_tokens
            self._mfa_tokens_set_time = time.time()
            raise

        if "assist_pipeline" in hass.config.components:
            new_cloud_pipeline_id = await async_create_cloud_pipeline(hass)
        else:
            new_cloud_pipeline_id = None

        async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "login"})
        return self.json({"success": True, "cloud_pipeline": new_cloud_pipeline_id})


@ -243,6 +301,7 @@ class CloudLogoutView(HomeAssistantView):
        async with asyncio.timeout(REQUEST_TIMEOUT):
            await cloud.logout()

        async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "logout"})
        return self.json_message("ok")

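Aside: the reworked schema enables a two-step TOTP login. A hypothetical exchange against /api/cloud/login (invented values, illustration only):

    # Step 1: password login. For an MFA-enabled account, hass_nabucasa raises
    # auth.MFARequired; the view caches mfa_err.mfa_tokens with a timestamp, and
    # the error map turns the exception into HTTP 401
    # "Multi-factor authentication required.".
    payload_step_1 = {"email": "user@example.com", "password": "correct-horse"}

    # Step 2: within LOGIN_MFA_TIMEOUT (60 s), retry with the TOTP code instead
    # of the password; "password" and "code" are vol.Exclusive, so a request may
    # carry only one of them. A later retry hits MFAExpiredOrNotStarted instead.
    payload_step_2 = {"email": "user@example.com", "code": "123456"}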
@ -1,13 +1,18 @@
{
  "domain": "cloud",
  "name": "Home Assistant Cloud",
  "after_dependencies": ["assist_pipeline", "google_assistant", "alexa"],
  "after_dependencies": [
    "alexa",
    "assist_pipeline",
    "backup",
    "google_assistant"
  ],
  "codeowners": ["@home-assistant/cloud"],
  "dependencies": ["auth", "http", "repairs", "webhook"],
  "documentation": "https://www.home-assistant.io/integrations/cloud",
  "integration_type": "system",
  "iot_class": "cloud_push",
  "loggers": ["hass_nabucasa"],
  "requirements": ["hass-nabucasa==0.86.0"],
  "requirements": ["hass-nabucasa==0.87.0"],
  "single_config_entry": true
}
@ -5,5 +5,5 @@
  "documentation": "https://www.home-assistant.io/integrations/compensation",
  "iot_class": "calculated",
  "quality_scale": "legacy",
  "requirements": ["numpy==2.1.3"]
  "requirements": ["numpy==2.2.0"]
}
@ -24,7 +24,7 @@ from .agent_manager import (
    get_agent_manager,
)
from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY
from .default_agent import METADATA_CUSTOM_FILE, METADATA_CUSTOM_SENTENCE, DefaultAgent
from .default_agent import METADATA_CUSTOM_FILE, METADATA_CUSTOM_SENTENCE
from .entity import ConversationEntity
from .models import ConversationInput

@ -162,8 +162,7 @@ async def websocket_list_sentences(
    hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
) -> None:
    """List custom registered sentences."""
    agent = hass.data.get(DATA_DEFAULT_ENTITY)
    assert isinstance(agent, DefaultAgent)
    agent = hass.data[DATA_DEFAULT_ENTITY]

    sentences = []
    for trigger_data in agent.trigger_sentences:
@ -185,8 +184,7 @@ async def websocket_hass_agent_debug(
    hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
) -> None:
    """Return intents that would be matched by the default agent for a list of sentences."""
    agent = hass.data.get(DATA_DEFAULT_ENTITY)
    assert isinstance(agent, DefaultAgent)
    agent = hass.data[DATA_DEFAULT_ENTITY]

    # Return results for each sentence in the same order as the input.
    result_dicts: list[dict[str, Any] | None] = []
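Aside: dropping the isinstance asserts works because DATA_DEFAULT_ENTITY is a typed HassKey, so indexing hass.data with it already narrows the value for type checkers. A rough sketch of the pattern (the key string here is hypothetical, not taken from the diff):

    from homeassistant.util.hass_dict import HassKey

    DATA_DEFAULT_ENTITY: HassKey[DefaultAgent] = HassKey("conversation_default_entity")

    agent = hass.data[DATA_DEFAULT_ENTITY]  # statically typed as DefaultAgent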
@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "system",
  "quality_scale": "internal",
  "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.9"]
  "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.20"]
}
49
homeassistant/components/cookidoo/__init__.py
Normal file
@ -0,0 +1,49 @@
"""The Cookidoo integration."""

from __future__ import annotations

from cookidoo_api import Cookidoo, CookidooConfig, CookidooLocalizationConfig

from homeassistant.const import (
    CONF_COUNTRY,
    CONF_EMAIL,
    CONF_LANGUAGE,
    CONF_PASSWORD,
    Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator

PLATFORMS: list[Platform] = [Platform.TODO]


async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool:
    """Set up Cookidoo from a config entry."""

    cookidoo = Cookidoo(
        async_get_clientsession(hass),
        CookidooConfig(
            email=entry.data[CONF_EMAIL],
            password=entry.data[CONF_PASSWORD],
            localization=CookidooLocalizationConfig(
                country_code=entry.data[CONF_COUNTRY].lower(),
                language=entry.data[CONF_LANGUAGE],
            ),
        ),
    )

    coordinator = CookidooDataUpdateCoordinator(hass, cookidoo, entry)
    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
247
homeassistant/components/cookidoo/config_flow.py
Normal file
@ -0,0 +1,247 @@
"""Config flow for Cookidoo integration."""

from __future__ import annotations

from collections.abc import Mapping
import logging
from typing import Any

from cookidoo_api import (
    Cookidoo,
    CookidooAuthException,
    CookidooConfig,
    CookidooLocalizationConfig,
    CookidooRequestException,
    get_country_options,
    get_localization_options,
)
import voluptuous as vol

from homeassistant.config_entries import (
    SOURCE_RECONFIGURE,
    SOURCE_USER,
    ConfigFlow,
    ConfigFlowResult,
)
from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
    CountrySelector,
    CountrySelectorConfig,
    LanguageSelector,
    LanguageSelectorConfig,
    TextSelector,
    TextSelectorConfig,
    TextSelectorType,
)

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

AUTH_DATA_SCHEMA = {
    vol.Required(CONF_EMAIL): TextSelector(
        TextSelectorConfig(
            type=TextSelectorType.EMAIL,
            autocomplete="email",
        ),
    ),
    vol.Required(CONF_PASSWORD): TextSelector(
        TextSelectorConfig(
            type=TextSelectorType.PASSWORD,
            autocomplete="current-password",
        ),
    ),
}


class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Cookidoo."""

    COUNTRY_DATA_SCHEMA: dict
    LANGUAGE_DATA_SCHEMA: dict

    user_input: dict[str, Any]

    async def async_step_reconfigure(
        self, user_input: dict[str, Any]
    ) -> ConfigFlowResult:
        """Perform reconfigure upon a user action."""
        return await self.async_step_user(user_input)

    async def async_step_user(
        self,
        user_input: dict[str, Any] | None = None,
    ) -> ConfigFlowResult:
        """Handle the user step as well as serve for reconfiguration."""
        errors: dict[str, str] = {}

        if user_input is not None and not (
            errors := await self.validate_input(user_input)
        ):
            if self.source == SOURCE_USER:
                self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]})
            self.user_input = user_input
            return await self.async_step_language()
        await self.generate_country_schema()
        suggested_values: dict = {}
        if self.source == SOURCE_RECONFIGURE:
            reconfigure_entry = self._get_reconfigure_entry()
            suggested_values = {
                **suggested_values,
                **reconfigure_entry.data,
            }
        if user_input is not None:
            suggested_values = {**suggested_values, **user_input}
        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                data_schema=vol.Schema(
                    {**AUTH_DATA_SCHEMA, **self.COUNTRY_DATA_SCHEMA}
                ),
                suggested_values=suggested_values,
            ),
            description_placeholders={"cookidoo": "Cookidoo"},
            errors=errors,
        )

    async def async_step_language(
        self,
        language_input: dict[str, Any] | None = None,
    ) -> ConfigFlowResult:
        """Async language step to set up the connection."""
        errors: dict[str, str] = {}
        if language_input is not None and not (
            errors := await self.validate_input(self.user_input, language_input)
        ):
            if self.source == SOURCE_USER:
                return self.async_create_entry(
                    title="Cookidoo", data={**self.user_input, **language_input}
                )
            reconfigure_entry = self._get_reconfigure_entry()
            return self.async_update_reload_and_abort(
                reconfigure_entry,
                data={
                    **reconfigure_entry.data,
                    **self.user_input,
                    **language_input,
                },
            )

        await self.generate_language_schema()
        return self.async_show_form(
            step_id="language",
            data_schema=vol.Schema(self.LANGUAGE_DATA_SCHEMA),
            description_placeholders={"cookidoo": "Cookidoo"},
            errors=errors,
        )

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Perform reauth upon an API authentication error."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Dialog that informs the user that reauth is required."""
        errors: dict[str, str] = {}

        reauth_entry = self._get_reauth_entry()

        if user_input is not None:
            if not (
                errors := await self.validate_input({**reauth_entry.data, **user_input})
            ):
                if user_input[CONF_EMAIL] != reauth_entry.data[CONF_EMAIL]:
                    self._async_abort_entries_match(
                        {CONF_EMAIL: user_input[CONF_EMAIL]}
                    )
                return self.async_update_reload_and_abort(
                    reauth_entry, data_updates=user_input
                )
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=self.add_suggested_values_to_schema(
                data_schema=vol.Schema(AUTH_DATA_SCHEMA),
                suggested_values={CONF_EMAIL: reauth_entry.data[CONF_EMAIL]},
            ),
            description_placeholders={"cookidoo": "Cookidoo"},
            errors=errors,
        )

    async def generate_country_schema(self) -> None:
        """Generate country schema."""
        self.COUNTRY_DATA_SCHEMA = {
            vol.Required(CONF_COUNTRY): CountrySelector(
                CountrySelectorConfig(
                    countries=[
                        country.upper() for country in await get_country_options()
                    ],
                )
            )
        }

    async def generate_language_schema(self) -> None:
        """Generate language schema."""
        self.LANGUAGE_DATA_SCHEMA = {
            vol.Required(CONF_LANGUAGE): LanguageSelector(
                LanguageSelectorConfig(
                    languages=[
                        option.language
                        for option in await get_localization_options(
                            country=self.user_input[CONF_COUNTRY].lower()
                        )
                    ],
                    native_name=True,
                ),
            ),
        }

    async def validate_input(
        self,
        user_input: dict[str, Any],
        language_input: dict[str, Any] | None = None,
    ) -> dict[str, str]:
        """Input validation helper."""

        errors: dict[str, str] = {}

        data_input: dict[str, Any] = {}

        if self.source == SOURCE_RECONFIGURE:
            reconfigure_entry = self._get_reconfigure_entry()
            data_input = {**data_input, **reconfigure_entry.data}
        data_input = {**data_input, **user_input}
        if language_input:
            data_input = {**data_input, **language_input}
        else:
            data_input[CONF_LANGUAGE] = (
                await get_localization_options(country=data_input[CONF_COUNTRY].lower())
            )[0]  # Pick any language to test login

        session = async_get_clientsession(self.hass)
        cookidoo = Cookidoo(
            session,
            CookidooConfig(
                email=data_input[CONF_EMAIL],
                password=data_input[CONF_PASSWORD],
                localization=CookidooLocalizationConfig(
                    country_code=data_input[CONF_COUNTRY].lower(),
                    language=data_input[CONF_LANGUAGE],
                ),
            ),
        )
        try:
            await cookidoo.login()
            if language_input:
                await cookidoo.get_additional_items()
        except CookidooRequestException:
            errors["base"] = "cannot_connect"
        except CookidooAuthException:
            errors["base"] = "invalid_auth"
        except Exception:
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        return errors
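Aside: after the user and language steps both validate, the created entry merges the inputs of the two steps. Roughly (values invented for illustration; the exact language format depends on cookidoo_api's localization options):

    entry_data = {
        "email": "user@example.com",
        "password": "...",
        "country": "CH",      # as selected; lowered for API calls
        "language": "de-CH",  # hypothetical locale value
    }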
3
homeassistant/components/cookidoo/const.py
Normal file
@ -0,0 +1,3 @@
"""Constants for the Cookidoo integration."""

DOMAIN = "cookidoo"
101
homeassistant/components/cookidoo/coordinator.py
Normal file
@ -0,0 +1,101 @@
"""DataUpdateCoordinator for the Cookidoo integration."""

from __future__ import annotations

from dataclasses import dataclass
from datetime import timedelta
import logging

from cookidoo_api import (
    Cookidoo,
    CookidooAdditionalItem,
    CookidooAuthException,
    CookidooException,
    CookidooIngredientItem,
    CookidooRequestException,
)

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_EMAIL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

type CookidooConfigEntry = ConfigEntry[CookidooDataUpdateCoordinator]


@dataclass
class CookidooData:
    """Cookidoo data type."""

    ingredient_items: list[CookidooIngredientItem]
    additional_items: list[CookidooAdditionalItem]


class CookidooDataUpdateCoordinator(DataUpdateCoordinator[CookidooData]):
    """A Cookidoo Data Update Coordinator."""

    config_entry: CookidooConfigEntry

    def __init__(
        self, hass: HomeAssistant, cookidoo: Cookidoo, entry: CookidooConfigEntry
    ) -> None:
        """Initialize the Cookidoo data coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            name=DOMAIN,
            update_interval=timedelta(seconds=90),
            config_entry=entry,
        )
        self.cookidoo = cookidoo

    async def _async_setup(self) -> None:
        try:
            await self.cookidoo.login()
        except CookidooRequestException as e:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="setup_request_exception",
            ) from e
        except CookidooAuthException as e:
            raise ConfigEntryAuthFailed(
                translation_domain=DOMAIN,
                translation_key="setup_authentication_exception",
                translation_placeholders={
                    CONF_EMAIL: self.config_entry.data[CONF_EMAIL]
                },
            ) from e

    async def _async_update_data(self) -> CookidooData:
        try:
            ingredient_items = await self.cookidoo.get_ingredient_items()
            additional_items = await self.cookidoo.get_additional_items()
        except CookidooAuthException:
            try:
                await self.cookidoo.refresh_token()
            except CookidooAuthException as exc:
                raise ConfigEntryAuthFailed(
                    translation_domain=DOMAIN,
                    translation_key="setup_authentication_exception",
                    translation_placeholders={
                        CONF_EMAIL: self.config_entry.data[CONF_EMAIL]
                    },
                ) from exc
            _LOGGER.debug(
                "Authentication failed but re-authentication was successful, trying again later"
            )
            return self.data
        except CookidooException as e:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="update_exception",
            ) from e

        return CookidooData(
            ingredient_items=ingredient_items, additional_items=additional_items
        )
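Aside: the update path is deliberately forgiving; a sketched timeline of what the code above does (illustration only):

    # t+0s   poll -> CookidooAuthException (session expired)
    #        refresh_token() succeeds -> return the previous CookidooData
    #        snapshot, so entities keep their last state
    # t+90s  next scheduled poll retries normally
    # Only a failed refresh_token() escalates to ConfigEntryAuthFailed,
    # which starts the reauth flow defined in config_flow.py.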
30
homeassistant/components/cookidoo/entity.py
Normal file
@ -0,0 +1,30 @@
"""Base entity for the Cookidoo integration."""

from __future__ import annotations

from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import CookidooDataUpdateCoordinator


class CookidooBaseEntity(CoordinatorEntity[CookidooDataUpdateCoordinator]):
    """Cookidoo base entity."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: CookidooDataUpdateCoordinator,
    ) -> None:
        """Initialize the entity."""
        super().__init__(coordinator)

        self.device_info = DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
            name="Cookidoo",
            identifiers={(DOMAIN, coordinator.config_entry.entry_id)},
            manufacturer="Vorwerk International & Co. KmG",
            model="Cookidoo - Thermomix® recipe portal",
        )
12
homeassistant/components/cookidoo/icons.json
Normal file
@ -0,0 +1,12 @@
{
  "entity": {
    "todo": {
      "ingredient_list": {
        "default": "mdi:cart-plus"
      },
      "additional_item_list": {
        "default": "mdi:cart-plus"
      }
    }
  }
}
11
homeassistant/components/cookidoo/manifest.json
Normal file
@ -0,0 +1,11 @@
{
  "domain": "cookidoo",
  "name": "Cookidoo",
  "codeowners": ["@miaucl"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/cookidoo",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "quality_scale": "silver",
  "requirements": ["cookidoo-api==0.10.0"]
}
90
homeassistant/components/cookidoo/quality_scale.yaml
Normal file
@ -0,0 +1,90 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: No service actions implemented
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: No service actions implemented
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions:
    status: exempt
    comment: No special external action required
  entity-event-setup:
    status: exempt
    comment: No callbacks are implemented
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  config-entry-unloading: done
  log-when-unavailable:
    status: done
    comment: Offloaded to coordinator
  entity-unavailable:
    status: done
    comment: Offloaded to coordinator
  action-exceptions:
    status: done
    comment: Only providing todo actions
  reauthentication-flow: done
  parallel-updates: done
  test-coverage: done
  integration-owner: done
  docs-installation-parameters: done
  docs-configuration-parameters:
    status: exempt
    comment: No options flow

  # Gold
  entity-translations: done
  entity-device-class:
    status: exempt
    comment: currently no platform with device classes
  devices: done
  entity-category: done
  entity-disabled-by-default:
    status: exempt
    comment: No disabled entities implemented
  discovery:
    status: exempt
    comment: Nothing to discover
  stale-devices:
    status: exempt
    comment: No stale entities possible
  diagnostics: todo
  exception-translations: done
  icon-translations: done
  reconfiguration-flow: done
  dynamic-devices:
    status: exempt
    comment: No dynamic entities available
  discovery-update-info:
    status: exempt
    comment: No discoverable entities implemented
  repair-issues:
    status: exempt
    comment: No issues/repairs
  docs-use-cases: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-data-update: done
  docs-known-limitations: done
  docs-troubleshooting: todo
  docs-examples: todo

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: done
80
homeassistant/components/cookidoo/strings.json
Normal file
@ -0,0 +1,80 @@
{
  "config": {
    "step": {
      "user": {
        "title": "Setup {cookidoo}",
        "data": {
          "email": "[%key:common::config_flow::data::email%]",
          "password": "[%key:common::config_flow::data::password%]",
          "country": "Country"
        },
        "data_description": {
          "email": "Email used to access your {cookidoo} account.",
          "password": "Password used to access your {cookidoo} account.",
          "country": "Pick your country for the {cookidoo} content."
        }
      },
      "language": {
        "title": "Setup {cookidoo}",
        "data": {
          "language": "[%key:common::config_flow::data::language%]"
        },
        "data_description": {
          "language": "Pick your language for the {cookidoo} content."
        }
      },
      "reauth_confirm": {
        "title": "Login again to {cookidoo}",
        "description": "Please log in to {cookidoo} again to continue using this integration.",
        "data": {
          "email": "[%key:common::config_flow::data::email%]",
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "email": "[%key:component::cookidoo::config::step::user::data_description::email%]",
          "password": "[%key:component::cookidoo::config::step::user::data_description::password%]"
        }
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
    }
  },
  "entity": {
    "todo": {
      "ingredient_list": {
        "name": "Shopping list"
      },
      "additional_item_list": {
        "name": "Additional purchases"
      }
    }
  },
  "exceptions": {
    "todo_save_item_failed": {
      "message": "Failed to save {name} to Cookidoo shopping list"
    },
    "todo_update_item_failed": {
      "message": "Failed to update {name} in Cookidoo shopping list"
    },
    "todo_delete_item_failed": {
      "message": "Failed to delete {count} item(s) from Cookidoo shopping list"
    },
    "setup_request_exception": {
      "message": "Failed to connect to server, try again later"
    },
    "setup_authentication_exception": {
      "message": "Authentication failed for {email}, check your email and password"
    },
    "update_exception": {
      "message": "Unable to connect and retrieve data from cookidoo"
    }
  }
}
185
homeassistant/components/cookidoo/todo.py
Normal file
@ -0,0 +1,185 @@
"""Todo platform for the Cookidoo integration."""

from __future__ import annotations

from typing import TYPE_CHECKING

from cookidoo_api import (
    CookidooAdditionalItem,
    CookidooException,
    CookidooIngredientItem,
)

from homeassistant.components.todo import (
    TodoItem,
    TodoItemStatus,
    TodoListEntity,
    TodoListEntityFeature,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN
from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator
from .entity import CookidooBaseEntity

PARALLEL_UPDATES = 0


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: CookidooConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the todo list from a config entry created in the integrations UI."""
    coordinator = config_entry.runtime_data

    async_add_entities(
        [
            CookidooIngredientsTodoListEntity(coordinator),
            CookidooAdditionalItemTodoListEntity(coordinator),
        ]
    )


class CookidooIngredientsTodoListEntity(CookidooBaseEntity, TodoListEntity):
    """A To-do List representation of the ingredients in the Cookidoo Shopping List."""

    _attr_translation_key = "ingredient_list"
    _attr_supported_features = TodoListEntityFeature.UPDATE_TODO_ITEM

    def __init__(self, coordinator: CookidooDataUpdateCoordinator) -> None:
        """Initialize the entity."""
        super().__init__(coordinator)
        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_ingredients"

    @property
    def todo_items(self) -> list[TodoItem]:
        """Return the todo ingredients."""
        return [
            TodoItem(
                uid=item.id,
                summary=item.name,
                description=item.description or "",
                status=(
                    TodoItemStatus.COMPLETED
                    if item.is_owned
                    else TodoItemStatus.NEEDS_ACTION
                ),
            )
            for item in self.coordinator.data.ingredient_items
        ]

    async def async_update_todo_item(self, item: TodoItem) -> None:
        """Update an ingredient in the To-do list.

        Cookidoo ingredients can be changed in state, but not in summary or
        description. Home Assistant currently cannot distinguish these cases,
        so summary or description edits fail silently.
        """
        try:
            if TYPE_CHECKING:
                assert item.uid
            await self.coordinator.cookidoo.edit_ingredient_items_ownership(
                [
                    CookidooIngredientItem(
                        id=item.uid,
                        name="",
                        description="",
                        is_owned=item.status == TodoItemStatus.COMPLETED,
                    )
                ]
            )
        except CookidooException as e:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="todo_update_item_failed",
                translation_placeholders={"name": item.summary or ""},
            ) from e

        await self.coordinator.async_refresh()


class CookidooAdditionalItemTodoListEntity(CookidooBaseEntity, TodoListEntity):
    """A To-do List representation of the additional items in the Cookidoo Shopping List."""

    _attr_translation_key = "additional_item_list"
    _attr_supported_features = (
        TodoListEntityFeature.CREATE_TODO_ITEM
        | TodoListEntityFeature.UPDATE_TODO_ITEM
        | TodoListEntityFeature.DELETE_TODO_ITEM
    )

    def __init__(self, coordinator: CookidooDataUpdateCoordinator) -> None:
        """Initialize the entity."""
        super().__init__(coordinator)
        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_additional_items"

    @property
    def todo_items(self) -> list[TodoItem]:
        """Return the todo items."""

        return [
            TodoItem(
                uid=item.id,
                summary=item.name,
                status=(
                    TodoItemStatus.COMPLETED
                    if item.is_owned
                    else TodoItemStatus.NEEDS_ACTION
                ),
            )
            for item in self.coordinator.data.additional_items
        ]

    async def async_create_todo_item(self, item: TodoItem) -> None:
        """Add an item to the To-do list."""

        try:
            if TYPE_CHECKING:
                assert item.summary
            await self.coordinator.cookidoo.add_additional_items([item.summary])
        except CookidooException as e:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="todo_save_item_failed",
                translation_placeholders={"name": item.summary or ""},
            ) from e

        await self.coordinator.async_refresh()

    async def async_update_todo_item(self, item: TodoItem) -> None:
        """Update an item in the To-do list."""

        try:
            if TYPE_CHECKING:
                assert item.uid
                assert item.summary
            new_item = CookidooAdditionalItem(
                id=item.uid,
                name=item.summary,
                is_owned=item.status == TodoItemStatus.COMPLETED,
            )
            await self.coordinator.cookidoo.edit_additional_items_ownership([new_item])
            await self.coordinator.cookidoo.edit_additional_items([new_item])
        except CookidooException as e:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="todo_update_item_failed",
                translation_placeholders={"name": item.summary or ""},
            ) from e

        await self.coordinator.async_refresh()

    async def async_delete_todo_items(self, uids: list[str]) -> None:
        """Delete items from the To-do list."""

        try:
            await self.coordinator.cookidoo.remove_additional_items(uids)
        except CookidooException as e:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="todo_delete_item_failed",
                translation_placeholders={"count": str(len(uids))},
            ) from e

        await self.coordinator.async_refresh()
@ -49,7 +49,7 @@ class BaseCrownstoneFlowHandler(ConfigEntryBaseFlow):
    cloud: CrownstoneCloud

    def __init__(
        self, flow_type: str, create_entry_cb: Callable[..., ConfigFlowResult]
        self, flow_type: str, create_entry_cb: Callable[[], ConfigFlowResult]
    ) -> None:
        """Set up flow instance."""
        self.flow_type = flow_type
Some files were not shown because too many files have changed in this diff.