mirror of
https://github.com/home-assistant/core.git
synced 2025-09-28 22:39:22 +00:00
Compare commits
1 Commits
2024.12.0b
...
cloud_enab
Author | SHA1 | Date | |
---|---|---|---|
![]() |
b23eacc7ad |
6
.github/workflows/builder.yml
vendored
6
.github/workflows/builder.yml
vendored
@@ -10,7 +10,7 @@ on:
|
||||
|
||||
env:
|
||||
BUILD_TYPE: core
|
||||
DEFAULT_PYTHON: "3.13"
|
||||
DEFAULT_PYTHON: "3.12"
|
||||
PIP_TIMEOUT: 60
|
||||
UV_HTTP_TIMEOUT: 60
|
||||
UV_SYSTEM_PYTHON: "true"
|
||||
@@ -509,7 +509,7 @@ jobs:
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build Docker image
|
||||
uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0
|
||||
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
|
||||
with:
|
||||
context: . # So action will not pull the repository again
|
||||
file: ./script/hassfest/docker/Dockerfile
|
||||
@@ -522,7 +522,7 @@ jobs:
|
||||
- name: Push Docker image
|
||||
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
|
||||
id: push
|
||||
uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0
|
||||
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
|
||||
with:
|
||||
context: . # So action will not pull the repository again
|
||||
file: ./script/hassfest/docker/Dockerfile
|
||||
|
6
.github/workflows/ci.yaml
vendored
6
.github/workflows/ci.yaml
vendored
@@ -1248,11 +1248,12 @@ jobs:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
if: needs.info.outputs.test_full_suite == 'true'
|
||||
uses: codecov/codecov-action@v5.0.7
|
||||
uses: codecov/codecov-action@v4.6.0
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
flags: full-suite
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
version: v0.6.0
|
||||
|
||||
pytest-partial:
|
||||
runs-on: ubuntu-24.04
|
||||
@@ -1386,7 +1387,8 @@ jobs:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
if: needs.info.outputs.test_full_suite == 'false'
|
||||
uses: codecov/codecov-action@v5.0.7
|
||||
uses: codecov/codecov-action@v4.6.0
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
version: v0.6.0
|
||||
|
4
.github/workflows/codeql.yml
vendored
4
.github/workflows/codeql.yml
vendored
@@ -24,11 +24,11 @@ jobs:
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3.27.5
|
||||
uses: github/codeql-action/init@v3.27.1
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3.27.5
|
||||
uses: github/codeql-action/analyze@v3.27.1
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
2
.github/workflows/wheels.yml
vendored
2
.github/workflows/wheels.yml
vendored
@@ -143,7 +143,7 @@ jobs:
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
|
||||
skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy
|
||||
skip-binary: aiohttp;multidict;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements.txt"
|
||||
|
@@ -1,6 +1,6 @@
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.8.0
|
||||
rev: v0.7.2
|
||||
hooks:
|
||||
- id: ruff
|
||||
args:
|
||||
@@ -18,7 +18,7 @@ repos:
|
||||
exclude_types: [csv, json, html]
|
||||
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v5.0.0
|
||||
rev: v4.4.0
|
||||
hooks:
|
||||
- id: check-executables-have-shebangs
|
||||
stages: [manual]
|
||||
@@ -83,14 +83,14 @@ repos:
|
||||
pass_filenames: false
|
||||
language: script
|
||||
types: [text]
|
||||
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(quality_scale)\.yaml|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
|
||||
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
|
||||
- id: hassfest-metadata
|
||||
name: hassfest-metadata
|
||||
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker
|
||||
pass_filenames: false
|
||||
language: script
|
||||
types: [text]
|
||||
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
|
||||
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml)$
|
||||
- id: hassfest-mypy-config
|
||||
name: hassfest-mypy-config
|
||||
entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config
|
||||
|
@@ -385,7 +385,6 @@ homeassistant.components.recollect_waste.*
|
||||
homeassistant.components.recorder.*
|
||||
homeassistant.components.remote.*
|
||||
homeassistant.components.renault.*
|
||||
homeassistant.components.reolink.*
|
||||
homeassistant.components.repairs.*
|
||||
homeassistant.components.rest.*
|
||||
homeassistant.components.rest_command.*
|
||||
@@ -438,7 +437,6 @@ homeassistant.components.starlink.*
|
||||
homeassistant.components.statistics.*
|
||||
homeassistant.components.steamist.*
|
||||
homeassistant.components.stookalert.*
|
||||
homeassistant.components.stookwijzer.*
|
||||
homeassistant.components.stream.*
|
||||
homeassistant.components.streamlabswater.*
|
||||
homeassistant.components.stt.*
|
||||
|
16
.vscode/tasks.json
vendored
16
.vscode/tasks.json
vendored
@@ -87,22 +87,6 @@
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Update syrupy snapshots",
|
||||
"detail": "Update syrupy snapshots for a given integration.",
|
||||
"type": "shell",
|
||||
"command": "python3 -m pytest ./tests/components/${input:integrationName} --snapshot-update",
|
||||
"dependsOn": ["Compile English translations"],
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Generate Requirements",
|
||||
"type": "shell",
|
||||
|
16
CODEOWNERS
16
CODEOWNERS
@@ -40,8 +40,6 @@ build.json @home-assistant/supervisor
|
||||
# Integrations
|
||||
/homeassistant/components/abode/ @shred86
|
||||
/tests/components/abode/ @shred86
|
||||
/homeassistant/components/acaia/ @zweckj
|
||||
/tests/components/acaia/ @zweckj
|
||||
/homeassistant/components/accuweather/ @bieniu
|
||||
/tests/components/accuweather/ @bieniu
|
||||
/homeassistant/components/acmeda/ @atmurray
|
||||
@@ -588,8 +586,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/group/ @home-assistant/core
|
||||
/homeassistant/components/guardian/ @bachya
|
||||
/tests/components/guardian/ @bachya
|
||||
/homeassistant/components/habitica/ @tr4nt0r
|
||||
/tests/components/habitica/ @tr4nt0r
|
||||
/homeassistant/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
|
||||
/tests/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
|
||||
/homeassistant/components/hardkernel/ @home-assistant/core
|
||||
/tests/components/hardkernel/ @home-assistant/core
|
||||
/homeassistant/components/hardware/ @home-assistant/core
|
||||
@@ -974,6 +972,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/nanoleaf/ @milanmeu @joostlek
|
||||
/homeassistant/components/nasweb/ @nasWebio
|
||||
/tests/components/nasweb/ @nasWebio
|
||||
/homeassistant/components/neato/ @Santobert
|
||||
/tests/components/neato/ @Santobert
|
||||
/homeassistant/components/nederlandse_spoorwegen/ @YarmoM
|
||||
/homeassistant/components/ness_alarm/ @nickw444
|
||||
/tests/components/ness_alarm/ @nickw444
|
||||
@@ -1344,8 +1344,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/siren/ @home-assistant/core @raman325
|
||||
/homeassistant/components/sisyphus/ @jkeljo
|
||||
/homeassistant/components/sky_hub/ @rogerselwyn
|
||||
/homeassistant/components/sky_remote/ @dunnmj @saty9
|
||||
/tests/components/sky_remote/ @dunnmj @saty9
|
||||
/homeassistant/components/skybell/ @tkdrob
|
||||
/tests/components/skybell/ @tkdrob
|
||||
/homeassistant/components/slack/ @tkdrob @fletcherau
|
||||
@@ -1487,8 +1485,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/tedee/ @patrickhilker @zweckj
|
||||
/homeassistant/components/tellduslive/ @fredrike
|
||||
/tests/components/tellduslive/ @fredrike
|
||||
/homeassistant/components/template/ @PhracturedBlue @home-assistant/core
|
||||
/tests/components/template/ @PhracturedBlue @home-assistant/core
|
||||
/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core
|
||||
/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core
|
||||
/homeassistant/components/tesla_fleet/ @Bre77
|
||||
/tests/components/tesla_fleet/ @Bre77
|
||||
/homeassistant/components/tesla_wall_connector/ @einarhauks
|
||||
@@ -1573,8 +1571,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/unifi/ @Kane610
|
||||
/homeassistant/components/unifi_direct/ @tofuSCHNITZEL
|
||||
/homeassistant/components/unifiled/ @florisvdk
|
||||
/homeassistant/components/unifiprotect/ @RaHehl
|
||||
/tests/components/unifiprotect/ @RaHehl
|
||||
/homeassistant/components/upb/ @gwww
|
||||
/tests/components/upb/ @gwww
|
||||
/homeassistant/components/upc_connect/ @pvizeli @fabaff
|
||||
|
@@ -13,7 +13,7 @@ ENV \
|
||||
ARG QEMU_CPU
|
||||
|
||||
# Install uv
|
||||
RUN pip3 install uv==0.5.4
|
||||
RUN pip3 install uv==0.5.0
|
||||
|
||||
WORKDIR /usr/src
|
||||
|
||||
@@ -55,7 +55,7 @@ RUN \
|
||||
"armv7") go2rtc_suffix='arm' ;; \
|
||||
*) go2rtc_suffix=${BUILD_ARCH} ;; \
|
||||
esac \
|
||||
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.7/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
|
||||
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.6/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
|
||||
&& chmod +x /bin/go2rtc \
|
||||
# Verify go2rtc can be executed
|
||||
&& go2rtc --version
|
||||
|
@@ -35,9 +35,6 @@ RUN \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Add go2rtc binary
|
||||
COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc
|
||||
|
||||
# Install uv
|
||||
RUN pip3 install uv
|
||||
|
||||
|
10
build.yaml
10
build.yaml
@@ -1,10 +1,10 @@
|
||||
image: ghcr.io/home-assistant/{arch}-homeassistant
|
||||
build_from:
|
||||
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0
|
||||
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0
|
||||
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0
|
||||
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0
|
||||
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0
|
||||
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.06.1
|
||||
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.06.1
|
||||
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.06.1
|
||||
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.06.1
|
||||
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.06.1
|
||||
codenotary:
|
||||
signer: notary@home-assistant.io
|
||||
base_image: notary@home-assistant.io
|
||||
|
@@ -18,7 +18,7 @@ from homeassistant.util.json import json_loads
|
||||
JWT_TOKEN_CACHE_SIZE = 16
|
||||
MAX_TOKEN_SIZE = 8192
|
||||
|
||||
_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss", "sub", "jti")
|
||||
_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss")
|
||||
|
||||
_VERIFY_OPTIONS: dict[str, Any] = {f"verify_{key}": True for key in _VERIFY_KEYS} | {
|
||||
"require": []
|
||||
|
@@ -177,17 +177,17 @@ class TotpAuthModule(MultiFactorAuthModule):
|
||||
class TotpSetupFlow(SetupFlow):
|
||||
"""Handler for the setup flow."""
|
||||
|
||||
_auth_module: TotpAuthModule
|
||||
_ota_secret: str
|
||||
_url: str
|
||||
_image: str
|
||||
|
||||
def __init__(
|
||||
self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User
|
||||
) -> None:
|
||||
"""Initialize the setup flow."""
|
||||
super().__init__(auth_module, setup_schema, user.id)
|
||||
# to fix typing complaint
|
||||
self._auth_module: TotpAuthModule = auth_module
|
||||
self._user = user
|
||||
self._ota_secret: str = ""
|
||||
self._url: str | None = None
|
||||
self._image: str | None = None
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
@@ -214,11 +214,12 @@ class TotpSetupFlow(SetupFlow):
|
||||
errors["base"] = "invalid_code"
|
||||
|
||||
else:
|
||||
hass = self._auth_module.hass
|
||||
(
|
||||
self._ota_secret,
|
||||
self._url,
|
||||
self._image,
|
||||
) = await self._auth_module.hass.async_add_executor_job(
|
||||
) = await hass.async_add_executor_job(
|
||||
_generate_secret_and_qr_code,
|
||||
str(self._user.name),
|
||||
)
|
||||
|
@@ -515,7 +515,7 @@ async def async_from_config_dict(
|
||||
issue_registry.async_create_issue(
|
||||
hass,
|
||||
core.DOMAIN,
|
||||
f"python_version_{required_python_version}",
|
||||
"python_version",
|
||||
is_fixable=False,
|
||||
severity=issue_registry.IssueSeverity.WARNING,
|
||||
breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,
|
||||
|
@@ -1,5 +0,0 @@
|
||||
{
|
||||
"domain": "sky",
|
||||
"name": "Sky",
|
||||
"integrations": ["sky_hub", "sky_remote"]
|
||||
}
|
@@ -112,6 +112,9 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
if self._async_current_entries():
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=vol.Schema(self.data_schema)
|
||||
|
@@ -9,6 +9,5 @@
|
||||
},
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["jaraco.abode", "lomond"],
|
||||
"requirements": ["jaraco.abode==6.2.1"],
|
||||
"single_config_entry": true
|
||||
"requirements": ["jaraco.abode==6.2.1"]
|
||||
}
|
||||
|
@@ -28,6 +28,7 @@
|
||||
"invalid_mfa_code": "Invalid MFA code"
|
||||
},
|
||||
"abort": {
|
||||
"single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
}
|
||||
},
|
||||
|
@@ -1,31 +0,0 @@
|
||||
"""Initialize the Acaia component."""
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import AcaiaConfigEntry, AcaiaCoordinator
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.BUTTON,
|
||||
Platform.SENSOR,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
|
||||
"""Set up acaia as config entry."""
|
||||
|
||||
coordinator = AcaiaCoordinator(hass, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
@@ -1,58 +0,0 @@
|
||||
"""Binary sensor platform for Acaia scales."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from aioacaia.acaiascale import AcaiaScale
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .coordinator import AcaiaConfigEntry
|
||||
from .entity import AcaiaEntity
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class AcaiaBinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Description for Acaia binary sensor entities."""
|
||||
|
||||
is_on_fn: Callable[[AcaiaScale], bool]
|
||||
|
||||
|
||||
BINARY_SENSORS: tuple[AcaiaBinarySensorEntityDescription, ...] = (
|
||||
AcaiaBinarySensorEntityDescription(
|
||||
key="timer_running",
|
||||
translation_key="timer_running",
|
||||
device_class=BinarySensorDeviceClass.RUNNING,
|
||||
is_on_fn=lambda scale: scale.timer_running,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AcaiaConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up binary sensors."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities(
|
||||
AcaiaBinarySensor(coordinator, description) for description in BINARY_SENSORS
|
||||
)
|
||||
|
||||
|
||||
class AcaiaBinarySensor(AcaiaEntity, BinarySensorEntity):
|
||||
"""Representation of an Acaia binary sensor."""
|
||||
|
||||
entity_description: AcaiaBinarySensorEntityDescription
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the binary sensor is on."""
|
||||
return self.entity_description.is_on_fn(self._scale)
|
@@ -1,63 +0,0 @@
|
||||
"""Button entities for Acaia scales."""
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from aioacaia.acaiascale import AcaiaScale
|
||||
|
||||
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .coordinator import AcaiaConfigEntry
|
||||
from .entity import AcaiaEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class AcaiaButtonEntityDescription(ButtonEntityDescription):
|
||||
"""Description for acaia button entities."""
|
||||
|
||||
press_fn: Callable[[AcaiaScale], Coroutine[Any, Any, None]]
|
||||
|
||||
|
||||
BUTTONS: tuple[AcaiaButtonEntityDescription, ...] = (
|
||||
AcaiaButtonEntityDescription(
|
||||
key="tare",
|
||||
translation_key="tare",
|
||||
press_fn=lambda scale: scale.tare(),
|
||||
),
|
||||
AcaiaButtonEntityDescription(
|
||||
key="reset_timer",
|
||||
translation_key="reset_timer",
|
||||
press_fn=lambda scale: scale.reset_timer(),
|
||||
),
|
||||
AcaiaButtonEntityDescription(
|
||||
key="start_stop",
|
||||
translation_key="start_stop",
|
||||
press_fn=lambda scale: scale.start_stop_timer(),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AcaiaConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up button entities and services."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities(AcaiaButton(coordinator, description) for description in BUTTONS)
|
||||
|
||||
|
||||
class AcaiaButton(AcaiaEntity, ButtonEntity):
|
||||
"""Representation of an Acaia button."""
|
||||
|
||||
entity_description: AcaiaButtonEntityDescription
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Handle the button press."""
|
||||
await self.entity_description.press_fn(self._scale)
|
@@ -1,149 +0,0 @@
|
||||
"""Config flow for Acaia integration."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice
|
||||
from aioacaia.helpers import is_new_scale
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.bluetooth import (
|
||||
BluetoothServiceInfoBleak,
|
||||
async_discovered_service_info,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ADDRESS, CONF_NAME
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.selector import (
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
)
|
||||
|
||||
from .const import CONF_IS_NEW_STYLE_SCALE, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for acaia."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self._discovered: dict[str, Any] = {}
|
||||
self._discovered_devices: dict[str, str] = {}
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
mac = user_input[CONF_ADDRESS]
|
||||
try:
|
||||
is_new_style_scale = await is_new_scale(mac)
|
||||
except AcaiaDeviceNotFound:
|
||||
errors["base"] = "device_not_found"
|
||||
except AcaiaError:
|
||||
_LOGGER.exception("Error occurred while connecting to the scale")
|
||||
errors["base"] = "unknown"
|
||||
except AcaiaUnknownDevice:
|
||||
return self.async_abort(reason="unsupported_device")
|
||||
else:
|
||||
await self.async_set_unique_id(format_mac(mac))
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
if not errors:
|
||||
return self.async_create_entry(
|
||||
title=self._discovered_devices[mac],
|
||||
data={
|
||||
CONF_ADDRESS: mac,
|
||||
CONF_IS_NEW_STYLE_SCALE: is_new_style_scale,
|
||||
},
|
||||
)
|
||||
|
||||
for device in async_discovered_service_info(self.hass):
|
||||
self._discovered_devices[device.address] = device.name
|
||||
|
||||
if not self._discovered_devices:
|
||||
return self.async_abort(reason="no_devices_found")
|
||||
|
||||
options = [
|
||||
SelectOptionDict(
|
||||
value=device_mac,
|
||||
label=f"{device_name} ({device_mac})",
|
||||
)
|
||||
for device_mac, device_name in self._discovered_devices.items()
|
||||
]
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ADDRESS): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=options,
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
)
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_bluetooth(
|
||||
self, discovery_info: BluetoothServiceInfoBleak
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a discovered Bluetooth device."""
|
||||
|
||||
self._discovered[CONF_ADDRESS] = discovery_info.address
|
||||
self._discovered[CONF_NAME] = discovery_info.name
|
||||
|
||||
await self.async_set_unique_id(format_mac(discovery_info.address))
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
try:
|
||||
self._discovered[CONF_IS_NEW_STYLE_SCALE] = await is_new_scale(
|
||||
discovery_info.address
|
||||
)
|
||||
except AcaiaDeviceNotFound:
|
||||
_LOGGER.debug("Device not found during discovery")
|
||||
return self.async_abort(reason="device_not_found")
|
||||
except AcaiaError:
|
||||
_LOGGER.debug(
|
||||
"Error occurred while connecting to the scale during discovery",
|
||||
exc_info=True,
|
||||
)
|
||||
return self.async_abort(reason="unknown")
|
||||
except AcaiaUnknownDevice:
|
||||
_LOGGER.debug("Unsupported device during discovery")
|
||||
return self.async_abort(reason="unsupported_device")
|
||||
|
||||
return await self.async_step_bluetooth_confirm()
|
||||
|
||||
async def async_step_bluetooth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle confirmation of Bluetooth discovery."""
|
||||
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(
|
||||
title=self._discovered[CONF_NAME],
|
||||
data={
|
||||
CONF_ADDRESS: self._discovered[CONF_ADDRESS],
|
||||
CONF_IS_NEW_STYLE_SCALE: self._discovered[CONF_IS_NEW_STYLE_SCALE],
|
||||
},
|
||||
)
|
||||
|
||||
self.context["title_placeholders"] = placeholders = {
|
||||
CONF_NAME: self._discovered[CONF_NAME]
|
||||
}
|
||||
|
||||
self._set_confirm_only()
|
||||
return self.async_show_form(
|
||||
step_id="bluetooth_confirm",
|
||||
description_placeholders=placeholders,
|
||||
)
|
@@ -1,4 +0,0 @@
|
||||
"""Constants for component."""
|
||||
|
||||
DOMAIN = "acaia"
|
||||
CONF_IS_NEW_STYLE_SCALE = "is_new_style_scale"
|
@@ -1,86 +0,0 @@
|
||||
"""Coordinator for Acaia integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from aioacaia.acaiascale import AcaiaScale
|
||||
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ADDRESS
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .const import CONF_IS_NEW_STYLE_SCALE
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=15)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type AcaiaConfigEntry = ConfigEntry[AcaiaCoordinator]
|
||||
|
||||
|
||||
class AcaiaCoordinator(DataUpdateCoordinator[None]):
|
||||
"""Class to handle fetching data from the scale."""
|
||||
|
||||
config_entry: AcaiaConfigEntry
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: AcaiaConfigEntry) -> None:
|
||||
"""Initialize coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name="acaia coordinator",
|
||||
update_interval=SCAN_INTERVAL,
|
||||
config_entry=entry,
|
||||
)
|
||||
|
||||
self._scale = AcaiaScale(
|
||||
address_or_ble_device=entry.data[CONF_ADDRESS],
|
||||
name=entry.title,
|
||||
is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
|
||||
notify_callback=self.async_update_listeners,
|
||||
)
|
||||
|
||||
@property
|
||||
def scale(self) -> AcaiaScale:
|
||||
"""Return the scale object."""
|
||||
return self._scale
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Fetch data."""
|
||||
|
||||
# scale is already connected, return
|
||||
if self._scale.connected:
|
||||
return
|
||||
|
||||
# scale is not connected, try to connect
|
||||
try:
|
||||
await self._scale.connect(setup_tasks=False)
|
||||
except (AcaiaDeviceNotFound, AcaiaError, TimeoutError) as ex:
|
||||
_LOGGER.debug(
|
||||
"Could not connect to scale: %s, Error: %s",
|
||||
self.config_entry.data[CONF_ADDRESS],
|
||||
ex,
|
||||
)
|
||||
self._scale.device_disconnected_handler(notify=False)
|
||||
return
|
||||
|
||||
# connected, set up background tasks
|
||||
if not self._scale.heartbeat_task or self._scale.heartbeat_task.done():
|
||||
self._scale.heartbeat_task = self.config_entry.async_create_background_task(
|
||||
hass=self.hass,
|
||||
target=self._scale.send_heartbeats(),
|
||||
name="acaia_heartbeat_task",
|
||||
)
|
||||
|
||||
if not self._scale.process_queue_task or self._scale.process_queue_task.done():
|
||||
self._scale.process_queue_task = (
|
||||
self.config_entry.async_create_background_task(
|
||||
hass=self.hass,
|
||||
target=self._scale.process_queue(),
|
||||
name="acaia_process_queue_task",
|
||||
)
|
||||
)
|
@@ -1,31 +0,0 @@
|
||||
"""Diagnostics support for Acaia."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import asdict
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import AcaiaConfigEntry
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant,
|
||||
entry: AcaiaConfigEntry,
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
scale = coordinator.scale
|
||||
|
||||
# collect all data sources
|
||||
return {
|
||||
"model": scale.model,
|
||||
"device_state": (
|
||||
asdict(scale.device_state) if scale.device_state is not None else ""
|
||||
),
|
||||
"mac": scale.mac,
|
||||
"last_disconnect_time": scale.last_disconnect_time,
|
||||
"timer": scale.timer,
|
||||
"weight": scale.weight,
|
||||
}
|
@@ -1,46 +0,0 @@
|
||||
"""Base class for Acaia entities."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from homeassistant.helpers.device_registry import (
|
||||
CONNECTION_BLUETOOTH,
|
||||
DeviceInfo,
|
||||
format_mac,
|
||||
)
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AcaiaCoordinator
|
||||
|
||||
|
||||
@dataclass
|
||||
class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]):
|
||||
"""Common elements for all entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AcaiaCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._scale = coordinator.scale
|
||||
formatted_mac = format_mac(self._scale.mac)
|
||||
self._attr_unique_id = f"{formatted_mac}_{entity_description.key}"
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, formatted_mac)},
|
||||
manufacturer="Acaia",
|
||||
model=self._scale.model,
|
||||
suggested_area="Kitchen",
|
||||
connections={(CONNECTION_BLUETOOTH, self._scale.mac)},
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Returns whether entity is available."""
|
||||
return super().available and self._scale.connected
|
@@ -1,24 +0,0 @@
|
||||
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"timer_running": {
|
||||
"default": "mdi:timer",
|
||||
"state": {
|
||||
"on": "mdi:timer-play",
|
||||
"off": "mdi:timer-off"
|
||||
}
|
||||
}
|
||||
},
|
||||
"button": {
|
||||
"tare": {
|
||||
"default": "mdi:scale-balance"
|
||||
},
|
||||
"reset_timer": {
|
||||
"default": "mdi:timer-refresh"
|
||||
},
|
||||
"start_stop": {
|
||||
"default": "mdi:timer-play"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,29 +0,0 @@
|
||||
{
|
||||
"domain": "acaia",
|
||||
"name": "Acaia",
|
||||
"bluetooth": [
|
||||
{
|
||||
"manufacturer_id": 16962
|
||||
},
|
||||
{
|
||||
"local_name": "ACAIA*"
|
||||
},
|
||||
{
|
||||
"local_name": "PYXIS-*"
|
||||
},
|
||||
{
|
||||
"local_name": "LUNAR-*"
|
||||
},
|
||||
{
|
||||
"local_name": "PROCHBT001"
|
||||
}
|
||||
],
|
||||
"codeowners": ["@zweckj"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/acaia",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aioacaia"],
|
||||
"requirements": ["aioacaia==0.1.10"]
|
||||
}
|
@@ -1,106 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: todo
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
No explicit event subscriptions.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
Device is expected to be offline most of the time, but needs to connect quickly once available.
|
||||
unique-config-entry: done
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable:
|
||||
status: done
|
||||
comment: |
|
||||
Handled by coordinator.
|
||||
parallel-updates: done
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: |
|
||||
No authentication required.
|
||||
test-coverage: done
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: |
|
||||
No IP discovery.
|
||||
discovery:
|
||||
status: done
|
||||
comment: |
|
||||
Bluetooth discovery.
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
Device type integration.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: |
|
||||
No noisy/non-essential entities.
|
||||
entity-translations: done
|
||||
exception-translations:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom exceptions.
|
||||
icon-translations: done
|
||||
reconfiguration-flow:
|
||||
status: exempt
|
||||
comment: |
|
||||
Only parameter that could be changed (MAC = unique_id) would force a new config entry.
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: |
|
||||
No repairs/issues.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
Device type integration.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession:
|
||||
status: exempt
|
||||
comment: |
|
||||
Bluetooth connection.
|
||||
strict-typing: done
|
@@ -1,143 +0,0 @@
|
||||
"""Sensor platform for Acaia."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from aioacaia.acaiascale import AcaiaDeviceState, AcaiaScale
|
||||
from aioacaia.const import UnitMass as AcaiaUnitOfMass
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
RestoreSensor,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorExtraStoredData,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, UnitOfMass, UnitOfVolumeFlowRate
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .coordinator import AcaiaConfigEntry
|
||||
from .entity import AcaiaEntity
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class AcaiaSensorEntityDescription(SensorEntityDescription):
|
||||
"""Description for Acaia sensor entities."""
|
||||
|
||||
value_fn: Callable[[AcaiaScale], int | float | None]
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class AcaiaDynamicUnitSensorEntityDescription(AcaiaSensorEntityDescription):
|
||||
"""Description for Acaia sensor entities with dynamic units."""
|
||||
|
||||
unit_fn: Callable[[AcaiaDeviceState], str] | None = None
|
||||
|
||||
|
||||
SENSORS: tuple[AcaiaSensorEntityDescription, ...] = (
|
||||
AcaiaDynamicUnitSensorEntityDescription(
|
||||
key="weight",
|
||||
device_class=SensorDeviceClass.WEIGHT,
|
||||
native_unit_of_measurement=UnitOfMass.GRAMS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
unit_fn=lambda data: (
|
||||
UnitOfMass.OUNCES
|
||||
if data.units == AcaiaUnitOfMass.OUNCES
|
||||
else UnitOfMass.GRAMS
|
||||
),
|
||||
value_fn=lambda scale: scale.weight,
|
||||
),
|
||||
AcaiaDynamicUnitSensorEntityDescription(
|
||||
key="flow_rate",
|
||||
device_class=SensorDeviceClass.VOLUME_FLOW_RATE,
|
||||
native_unit_of_measurement=UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND,
|
||||
suggested_display_precision=1,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda scale: scale.flow_rate,
|
||||
),
|
||||
)
|
||||
RESTORE_SENSORS: tuple[AcaiaSensorEntityDescription, ...] = (
|
||||
AcaiaSensorEntityDescription(
|
||||
key="battery",
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda scale: (
|
||||
scale.device_state.battery_level if scale.device_state else None
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AcaiaConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up sensors."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
entities: list[SensorEntity] = [
|
||||
AcaiaSensor(coordinator, entity_description) for entity_description in SENSORS
|
||||
]
|
||||
entities.extend(
|
||||
AcaiaRestoreSensor(coordinator, entity_description)
|
||||
for entity_description in RESTORE_SENSORS
|
||||
)
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class AcaiaSensor(AcaiaEntity, SensorEntity):
|
||||
"""Representation of an Acaia sensor."""
|
||||
|
||||
entity_description: AcaiaDynamicUnitSensorEntityDescription
|
||||
|
||||
@property
|
||||
def native_unit_of_measurement(self) -> str | None:
|
||||
"""Return the unit of measurement of this entity."""
|
||||
if (
|
||||
self._scale.device_state is not None
|
||||
and self.entity_description.unit_fn is not None
|
||||
):
|
||||
return self.entity_description.unit_fn(self._scale.device_state)
|
||||
return self.entity_description.native_unit_of_measurement
|
||||
|
||||
@property
|
||||
def native_value(self) -> int | float | None:
|
||||
"""Return the state of the entity."""
|
||||
return self.entity_description.value_fn(self._scale)
|
||||
|
||||
|
||||
class AcaiaRestoreSensor(AcaiaEntity, RestoreSensor):
|
||||
"""Representation of an Acaia sensor with restore capabilities."""
|
||||
|
||||
entity_description: AcaiaSensorEntityDescription
|
||||
_restored_data: SensorExtraStoredData | None = None
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
self._restored_data = await self.async_get_last_sensor_data()
|
||||
if self._restored_data is not None:
|
||||
self._attr_native_value = self._restored_data.native_value
|
||||
self._attr_native_unit_of_measurement = (
|
||||
self._restored_data.native_unit_of_measurement
|
||||
)
|
||||
|
||||
if self._scale.device_state is not None:
|
||||
self._attr_native_value = self.entity_description.value_fn(self._scale)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
if self._scale.device_state is not None:
|
||||
self._attr_native_value = self.entity_description.value_fn(self._scale)
|
||||
self._async_write_ha_state()
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return super().available or self._restored_data is not None
|
@@ -1,46 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"flow_title": "{name}",
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
|
||||
"unsupported_device": "This device is not supported."
|
||||
},
|
||||
"error": {
|
||||
"device_not_found": "Device could not be found.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"bluetooth_confirm": {
|
||||
"description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]"
|
||||
},
|
||||
"user": {
|
||||
"description": "[%key:component::bluetooth::config::step::user::description%]",
|
||||
"data": {
|
||||
"address": "[%key:common::config_flow::data::device%]"
|
||||
},
|
||||
"data_description": {
|
||||
"address": "Select Acaia scale you want to set up"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"timer_running": {
|
||||
"name": "Timer running"
|
||||
}
|
||||
},
|
||||
"button": {
|
||||
"tare": {
|
||||
"name": "Tare"
|
||||
},
|
||||
"reset_timer": {
|
||||
"name": "Reset timer"
|
||||
},
|
||||
"start_stop": {
|
||||
"name": "Start/stop timer"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -7,6 +7,7 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["accuweather"],
|
||||
"requirements": ["accuweather==4.0.0"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["accuweather==3.0.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
@@ -4,6 +4,5 @@
|
||||
"codeowners": [],
|
||||
"documentation": "https://www.home-assistant.io/integrations/acer_projector",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["pyserial==3.5"]
|
||||
}
|
||||
|
@@ -3,6 +3,5 @@
|
||||
"name": "Actiontec",
|
||||
"codeowners": [],
|
||||
"documentation": "https://www.home-assistant.io/integrations/actiontec",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "legacy"
|
||||
"iot_class": "local_polling"
|
||||
}
|
||||
|
@@ -37,7 +37,7 @@ STATE_KEY_POSITION = "position"
|
||||
|
||||
PLATFORM_SCHEMA = COVER_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_ADS_VAR): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_POSITION): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_SET_POS): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_CLOSE): cv.string,
|
||||
|
@@ -5,6 +5,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/ads",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyads"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["pyads==3.4.0"]
|
||||
}
|
||||
|
@@ -6,5 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/advantage_air",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["advantage_air"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["advantage-air==0.4.4"]
|
||||
}
|
||||
|
@@ -3,7 +3,7 @@
|
||||
import logging
|
||||
|
||||
from aemet_opendata.exceptions import AemetError, TownNotFound
|
||||
from aemet_opendata.interface import AEMET, ConnectionOptions, UpdateFeature
|
||||
from aemet_opendata.interface import AEMET, ConnectionOptions
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
|
||||
@@ -23,11 +23,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo
|
||||
api_key = entry.data[CONF_API_KEY]
|
||||
latitude = entry.data[CONF_LATITUDE]
|
||||
longitude = entry.data[CONF_LONGITUDE]
|
||||
update_features: int = UpdateFeature.FORECAST
|
||||
if entry.options.get(CONF_STATION_UPDATES, True):
|
||||
update_features |= UpdateFeature.STATION
|
||||
station_updates = entry.options.get(CONF_STATION_UPDATES, True)
|
||||
|
||||
options = ConnectionOptions(api_key, update_features)
|
||||
options = ConnectionOptions(api_key, station_updates)
|
||||
aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options)
|
||||
try:
|
||||
await aemet.select_coordinates(latitude, longitude)
|
||||
|
@@ -45,7 +45,7 @@ class AemetConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
await self.async_set_unique_id(f"{latitude}-{longitude}")
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
options = ConnectionOptions(user_input[CONF_API_KEY])
|
||||
options = ConnectionOptions(user_input[CONF_API_KEY], False)
|
||||
aemet = AEMET(aiohttp_client.async_get_clientsession(self.hass), options)
|
||||
try:
|
||||
await aemet.select_coordinates(latitude, longitude)
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/aemet",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aemet_opendata"],
|
||||
"requirements": ["AEMET-OpenData==0.6.3"]
|
||||
"requirements": ["AEMET-OpenData==0.5.4"]
|
||||
}
|
||||
|
@@ -1,80 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
docs-high-level-description: todo
|
||||
docs-installation-instructions: todo
|
||||
docs-removal-instructions: todo
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
Entities of this integration does not explicitly subscribe to events.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: todo
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not require authentication.
|
||||
test-coverage: done
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration has a fixed single device.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration doesn't have any cases where raising an issue is needed.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration has a fixed single device.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: done
|
@@ -7,5 +7,6 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["airly"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["airly==1.1.0"]
|
||||
}
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioairq"],
|
||||
"requirements": ["aioairq==0.4.3"]
|
||||
"requirements": ["aioairq==0.3.2"]
|
||||
}
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/airtouch5",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["airtouch5py"],
|
||||
"requirements": ["airtouch5py==0.2.11"]
|
||||
"requirements": ["airtouch5py==0.2.10"]
|
||||
}
|
||||
|
@@ -11,5 +11,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/airzone",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioairzone"],
|
||||
"requirements": ["aioairzone==0.9.7"]
|
||||
"requirements": ["aioairzone==0.9.5"]
|
||||
}
|
||||
|
@@ -6,7 +6,7 @@ import asyncio
|
||||
from datetime import timedelta
|
||||
from functools import partial
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, Final, final
|
||||
from typing import Any, Final, final
|
||||
|
||||
from propcache import cached_property
|
||||
import voluptuous as vol
|
||||
@@ -35,7 +35,6 @@ from homeassistant.helpers.deprecation import (
|
||||
from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.entity_platform import EntityPlatform
|
||||
from homeassistant.helpers.frame import ReportBehavior, report_usage
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
@@ -164,6 +163,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
|
||||
_alarm_control_panel_option_default_code: str | None = None
|
||||
|
||||
__alarm_legacy_state: bool = False
|
||||
__alarm_legacy_state_reported: bool = False
|
||||
|
||||
def __init_subclass__(cls, **kwargs: Any) -> None:
|
||||
"""Post initialisation processing."""
|
||||
@@ -173,15 +173,17 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
|
||||
# setting the state directly.
|
||||
cls.__alarm_legacy_state = True
|
||||
|
||||
def __setattr__(self, name: str, value: Any, /) -> None:
|
||||
def __setattr__(self, __name: str, __value: Any) -> None:
|
||||
"""Set attribute.
|
||||
|
||||
Deprecation warning if setting '_attr_state' directly
|
||||
unless already reported.
|
||||
"""
|
||||
if name == "_attr_state":
|
||||
self._report_deprecated_alarm_state_handling()
|
||||
return super().__setattr__(name, value)
|
||||
if __name == "_attr_state":
|
||||
if self.__alarm_legacy_state_reported is not True:
|
||||
self._report_deprecated_alarm_state_handling()
|
||||
self.__alarm_legacy_state_reported = True
|
||||
return super().__setattr__(__name, __value)
|
||||
|
||||
@callback
|
||||
def add_to_platform_start(
|
||||
@@ -192,7 +194,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
|
||||
) -> None:
|
||||
"""Start adding an entity to a platform."""
|
||||
super().add_to_platform_start(hass, platform, parallel_updates)
|
||||
if self.__alarm_legacy_state:
|
||||
if self.__alarm_legacy_state and not self.__alarm_legacy_state_reported:
|
||||
self._report_deprecated_alarm_state_handling()
|
||||
|
||||
@callback
|
||||
@@ -201,30 +203,27 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
|
||||
|
||||
Integrations should implement alarm_state instead of using state directly.
|
||||
"""
|
||||
report_usage(
|
||||
"is setting state directly."
|
||||
f" Entity {self.entity_id} ({type(self)}) should implement the 'alarm_state'"
|
||||
" property and return its state using the AlarmControlPanelState enum",
|
||||
core_integration_behavior=ReportBehavior.ERROR,
|
||||
custom_integration_behavior=ReportBehavior.LOG,
|
||||
breaks_in_ha_version="2025.11",
|
||||
integration_domain=self.platform.platform_name if self.platform else None,
|
||||
exclude_integrations={DOMAIN},
|
||||
)
|
||||
self.__alarm_legacy_state_reported = True
|
||||
if "custom_components" in type(self).__module__:
|
||||
# Do not report on core integrations as they have been fixed.
|
||||
report_issue = "report it to the custom integration author."
|
||||
_LOGGER.warning(
|
||||
"Entity %s (%s) is setting state directly"
|
||||
" which will stop working in HA Core 2025.11."
|
||||
" Entities should implement the 'alarm_state' property and"
|
||||
" return its state using the AlarmControlPanelState enum, please %s",
|
||||
self.entity_id,
|
||||
type(self),
|
||||
report_issue,
|
||||
)
|
||||
|
||||
@final
|
||||
@property
|
||||
def state(self) -> str | None:
|
||||
"""Return the current state."""
|
||||
if (alarm_state := self.alarm_state) is not None:
|
||||
return alarm_state
|
||||
if self._attr_state is not None:
|
||||
# Backwards compatibility for integrations that set state directly
|
||||
# Should be removed in 2025.11
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(self._attr_state, str)
|
||||
return self._attr_state
|
||||
return None
|
||||
if (alarm_state := self.alarm_state) is None:
|
||||
return None
|
||||
return alarm_state
|
||||
|
||||
@cached_property
|
||||
def alarm_state(self) -> AlarmControlPanelState | None:
|
||||
@@ -270,6 +269,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
|
||||
"""Check if arm code is required, raise if no code is given."""
|
||||
if not (_code := self.code_or_default_code(code)) and self.code_arm_required:
|
||||
raise ServiceValidationError(
|
||||
f"Arming requires a code but none was given for {self.entity_id}",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="code_arm_required",
|
||||
translation_placeholders={
|
||||
|
@@ -130,7 +130,7 @@
|
||||
},
|
||||
"alarm_trigger": {
|
||||
"name": "Trigger",
|
||||
"description": "Trigger the alarm manually.",
|
||||
"description": "Enables an external alarm trigger.",
|
||||
"fields": {
|
||||
"code": {
|
||||
"name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
|
||||
@@ -138,10 +138,5 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"code_arm_required": {
|
||||
"message": "Arming requires a code but none was given for {entity_id}."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -816,19 +816,13 @@ class AlexaPlaybackController(AlexaCapability):
|
||||
"""
|
||||
supported_features = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
|
||||
|
||||
operations: dict[
|
||||
cover.CoverEntityFeature | media_player.MediaPlayerEntityFeature, str
|
||||
]
|
||||
if self.entity.domain == cover.DOMAIN:
|
||||
operations = {cover.CoverEntityFeature.STOP: "Stop"}
|
||||
else:
|
||||
operations = {
|
||||
media_player.MediaPlayerEntityFeature.NEXT_TRACK: "Next",
|
||||
media_player.MediaPlayerEntityFeature.PAUSE: "Pause",
|
||||
media_player.MediaPlayerEntityFeature.PLAY: "Play",
|
||||
media_player.MediaPlayerEntityFeature.PREVIOUS_TRACK: "Previous",
|
||||
media_player.MediaPlayerEntityFeature.STOP: "Stop",
|
||||
}
|
||||
operations = {
|
||||
media_player.MediaPlayerEntityFeature.NEXT_TRACK: "Next",
|
||||
media_player.MediaPlayerEntityFeature.PAUSE: "Pause",
|
||||
media_player.MediaPlayerEntityFeature.PLAY: "Play",
|
||||
media_player.MediaPlayerEntityFeature.PREVIOUS_TRACK: "Previous",
|
||||
media_player.MediaPlayerEntityFeature.STOP: "Stop",
|
||||
}
|
||||
|
||||
return [
|
||||
value
|
||||
|
@@ -559,10 +559,6 @@ class CoverCapabilities(AlexaEntity):
|
||||
)
|
||||
if supported & cover.CoverEntityFeature.SET_TILT_POSITION:
|
||||
yield AlexaRangeController(self.entity, instance=f"{cover.DOMAIN}.tilt")
|
||||
if supported & (
|
||||
cover.CoverEntityFeature.STOP | cover.CoverEntityFeature.STOP_TILT
|
||||
):
|
||||
yield AlexaPlaybackController(self.entity, instance=f"{cover.DOMAIN}.stop")
|
||||
yield AlexaEndpointHealth(self.hass, self.entity)
|
||||
yield Alexa(self.entity)
|
||||
|
||||
|
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable, Coroutine
|
||||
import logging
|
||||
import math
|
||||
@@ -765,25 +764,9 @@ async def async_api_stop(
|
||||
entity = directive.entity
|
||||
data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id}
|
||||
|
||||
if entity.domain == cover.DOMAIN:
|
||||
supported: int = entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
|
||||
feature_services: dict[int, str] = {
|
||||
cover.CoverEntityFeature.STOP.value: cover.SERVICE_STOP_COVER,
|
||||
cover.CoverEntityFeature.STOP_TILT.value: cover.SERVICE_STOP_COVER_TILT,
|
||||
}
|
||||
await asyncio.gather(
|
||||
*(
|
||||
hass.services.async_call(
|
||||
entity.domain, service, data, blocking=False, context=context
|
||||
)
|
||||
for feature, service in feature_services.items()
|
||||
if feature & supported
|
||||
)
|
||||
)
|
||||
else:
|
||||
await hass.services.async_call(
|
||||
entity.domain, SERVICE_MEDIA_STOP, data, blocking=False, context=context
|
||||
)
|
||||
await hass.services.async_call(
|
||||
entity.domain, SERVICE_MEDIA_STOP, data, blocking=False, context=context
|
||||
)
|
||||
|
||||
return directive.response()

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/alpha_vantage",
"iot_class": "cloud_polling",
"loggers": ["alpha_vantage"],
"quality_scale": "legacy",
"requirements": ["alpha-vantage==2.3.1"]
}

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/amazon_polly",
"iot_class": "cloud_push",
"loggers": ["boto3", "botocore", "s3transfer"],
"quality_scale": "legacy",
"requirements": ["boto3==1.34.131"]
}

@@ -1,6 +1,7 @@
"""Support for Amber Electric."""

import amberelectric
from amberelectric import Configuration
from amberelectric.api import amber_api

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_TOKEN

@@ -14,9 +15,8 @@ type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator]
async def async_setup_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool:
"""Set up Amber Electric from a config entry."""
configuration = amberelectric.Configuration(access_token=entry.data[CONF_API_TOKEN])
api_client = amberelectric.ApiClient(configuration)
api_instance = amberelectric.AmberApi(api_client)
configuration = Configuration(access_token=entry.data[CONF_API_TOKEN])
api_instance = amber_api.AmberApi.create(configuration)
site_id = entry.data[CONF_SITE_ID]

coordinator = AmberUpdateCoordinator(hass, api_instance, site_id)
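
The two sides of this hunk show how the client object is constructed in amberelectric 2.x versus the 1.x factory. The snippet below simply restates those calls as plain library usage; it assumes a valid API token and follows only what the diff itself shows, so check the library's own documentation before relying on either form.

# Sketch of the amberelectric client construction shown in the hunk above.
import amberelectric

# 2.x style (new side of the hunk)
configuration = amberelectric.Configuration(access_token="YOUR_API_TOKEN")  # token is a placeholder
api = amberelectric.AmberApi(amberelectric.ApiClient(configuration))

# 1.x style (old side of the hunk)
# from amberelectric import Configuration
# from amberelectric.api import amber_api
# api = amber_api.AmberApi.create(Configuration(access_token="YOUR_API_TOKEN"))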

@@ -3,8 +3,8 @@
from __future__ import annotations

import amberelectric
from amberelectric.models.site import Site
from amberelectric.models.site_status import SiteStatus
from amberelectric.api import amber_api
from amberelectric.model.site import Site, SiteStatus
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult

@@ -23,15 +23,11 @@ API_URL = "https://app.amber.com.au/developers"
def generate_site_selector_name(site: Site) -> str:
"""Generate the name to show in the site drop down in the configuration flow."""
# For some reason the generated API key returns this as any, not a string. Thanks pydantic
nmi = str(site.nmi)
if site.status == SiteStatus.CLOSED:
if site.closed_on is None:
return f"{nmi} (Closed)"
return f"{nmi} (Closed: {site.closed_on.isoformat()})"
return site.nmi + " (Closed: " + site.closed_on.isoformat() + ")" # type: ignore[no-any-return]
if site.status == SiteStatus.PENDING:
return f"{nmi} (Pending)"
return nmi
return site.nmi + " (Pending)" # type: ignore[no-any-return]
return site.nmi # type: ignore[no-any-return]

def filter_sites(sites: list[Site]) -> list[Site]:

@@ -39,7 +35,7 @@ def filter_sites(sites: list[Site]) -> list[Site]:
filtered: list[Site] = []
filtered_nmi: set[str] = set()

for site in sorted(sites, key=lambda site: site.status):
for site in sorted(sites, key=lambda site: site.status.value):
if site.status == SiteStatus.ACTIVE or site.nmi not in filtered_nmi:
filtered.append(site)
filtered_nmi.add(site.nmi)
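
filter_sites() above keeps active sites and otherwise the first site seen per NMI, relying on the sort by status to put active sites first. A runnable sketch of the same logic with simplified stand-ins for Site and SiteStatus:

# Runnable sketch of the filter_sites() de-duplication above.
# Site and SiteStatus here are simplified stand-ins for the amberelectric models.
from dataclasses import dataclass
from enum import Enum


class SiteStatus(str, Enum):
    ACTIVE = "active"
    CLOSED = "closed"
    PENDING = "pending"


@dataclass
class Site:
    nmi: str
    status: SiteStatus


def filter_sites(sites: list[Site]) -> list[Site]:
    filtered: list[Site] = []
    filtered_nmi: set[str] = set()
    # "active" sorts first alphabetically, so active sites win the NMI slot.
    for site in sorted(sites, key=lambda site: site.status):
        if site.status == SiteStatus.ACTIVE or site.nmi not in filtered_nmi:
            filtered.append(site)
            filtered_nmi.add(site.nmi)
    return filtered


sites = [Site("123", SiteStatus.CLOSED), Site("123", SiteStatus.ACTIVE), Site("456", SiteStatus.PENDING)]
print([(s.nmi, s.status.value) for s in filter_sites(sites)])
# [('123', 'active'), ('456', 'pending')]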

@@ -60,8 +56,7 @@ class AmberElectricConfigFlow(ConfigFlow, domain=DOMAIN):
def _fetch_sites(self, token: str) -> list[Site] | None:
configuration = amberelectric.Configuration(access_token=token)
api_client = amberelectric.ApiClient(configuration)
api = amberelectric.AmberApi(api_client)
api: amber_api.AmberApi = amber_api.AmberApi.create(configuration)

try:
sites: list[Site] = filter_sites(api.get_sites())

@@ -5,13 +5,13 @@ from __future__ import annotations
from datetime import timedelta
from typing import Any

import amberelectric
from amberelectric.models.actual_interval import ActualInterval
from amberelectric.models.channel import ChannelType
from amberelectric.models.current_interval import CurrentInterval
from amberelectric.models.forecast_interval import ForecastInterval
from amberelectric.models.price_descriptor import PriceDescriptor
from amberelectric.rest import ApiException
from amberelectric import ApiException
from amberelectric.api import amber_api
from amberelectric.model.actual_interval import ActualInterval
from amberelectric.model.channel import ChannelType
from amberelectric.model.current_interval import CurrentInterval
from amberelectric.model.forecast_interval import ForecastInterval
from amberelectric.model.interval import Descriptor

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -31,22 +31,22 @@ def is_forecast(interval: ActualInterval | CurrentInterval | ForecastInterval) -
def is_general(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
"""Return true if the supplied interval is on the general channel."""
return interval.channel_type == ChannelType.GENERAL
return interval.channel_type == ChannelType.GENERAL # type: ignore[no-any-return]

def is_controlled_load(
interval: ActualInterval | CurrentInterval | ForecastInterval,
) -> bool:
"""Return true if the supplied interval is on the controlled load channel."""
return interval.channel_type == ChannelType.CONTROLLEDLOAD
return interval.channel_type == ChannelType.CONTROLLED_LOAD # type: ignore[no-any-return]

def is_feed_in(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
"""Return true if the supplied interval is on the feed in channel."""
return interval.channel_type == ChannelType.FEEDIN
return interval.channel_type == ChannelType.FEED_IN # type: ignore[no-any-return]

def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None:
def normalize_descriptor(descriptor: Descriptor) -> str | None:
"""Return the snake case versions of descriptor names. Returns None if the name is not recognized."""
if descriptor is None:
return None

@@ -71,7 +71,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
"""AmberUpdateCoordinator - In charge of downloading the data for a site, which all the sensors read."""

def __init__(
self, hass: HomeAssistant, api: amberelectric.AmberApi, site_id: str
self, hass: HomeAssistant, api: amber_api.AmberApi, site_id: str
) -> None:
"""Initialise the data service."""
super().__init__(

@@ -93,13 +93,12 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
"grid": {},
}
try:
data = self._api.get_current_prices(self.site_id, next=48)
intervals = [interval.actual_instance for interval in data]
data = self._api.get_current_price(self.site_id, next=48)
except ApiException as api_exception:
raise UpdateFailed("Missing price data, skipping update") from api_exception

current = [interval for interval in intervals if is_current(interval)]
forecasts = [interval for interval in intervals if is_forecast(interval)]
current = [interval for interval in data if is_current(interval)]
forecasts = [interval for interval in data if is_forecast(interval)]
general = [interval for interval in current if is_general(interval)]

if len(general) == 0:

@@ -138,7 +137,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
interval for interval in forecasts if is_feed_in(interval)
]

LOGGER.debug("Fetched new Amber data: %s", intervals)
LOGGER.debug("Fetched new Amber data: %s", data)
return result

async def _async_update_data(self) -> dict[str, Any]:
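
The 2.x client used in the hunk above appears to return generated "oneOf" wrapper objects, which is why the new code unwraps interval.actual_instance before filtering. A tiny stand-in illustration of that unwrapping; Wrapper and CurrentInterval here are not the real amberelectric models.

# Illustrative stand-ins only: the real models come from the amberelectric package.
from dataclasses import dataclass


@dataclass
class CurrentInterval:
    per_kwh: float


@dataclass
class Wrapper:
    actual_instance: CurrentInterval  # the concrete model behind the oneOf wrapper


data = [Wrapper(CurrentInterval(25.0)), Wrapper(CurrentInterval(30.5))]
intervals = [item.actual_instance for item in data]
print([interval.per_kwh for interval in intervals])  # [25.0, 30.5]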

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/amberelectric",
"iot_class": "cloud_polling",
"loggers": ["amberelectric"],
"requirements": ["amberelectric==2.0.12"]
"requirements": ["amberelectric==1.1.1"]
}

@@ -8,9 +8,9 @@ from __future__ import annotations
from typing import Any

from amberelectric.models.channel import ChannelType
from amberelectric.models.current_interval import CurrentInterval
from amberelectric.models.forecast_interval import ForecastInterval
from amberelectric.model.channel import ChannelType
from amberelectric.model.current_interval import CurrentInterval
from amberelectric.model.forecast_interval import ForecastInterval

from homeassistant.components.sensor import (
SensorEntity,

@@ -52,7 +52,7 @@ class AmberSensor(CoordinatorEntity[AmberUpdateCoordinator], SensorEntity):
self,
coordinator: AmberUpdateCoordinator,
description: SensorEntityDescription,
channel_type: str,
channel_type: ChannelType,
) -> None:
"""Initialize the Sensor."""
super().__init__(coordinator)

@@ -73,7 +73,7 @@ class AmberPriceSensor(AmberSensor):
"""Return the current price in $/kWh."""
interval = self.coordinator.data[self.entity_description.key][self.channel_type]

if interval.channel_type == ChannelType.FEEDIN:
if interval.channel_type == ChannelType.FEED_IN:
return format_cents_to_dollars(interval.per_kwh) * -1
return format_cents_to_dollars(interval.per_kwh)

@@ -87,9 +87,9 @@ class AmberPriceSensor(AmberSensor):
return data

data["duration"] = interval.duration
data["date"] = interval.var_date.isoformat()
data["date"] = interval.date.isoformat()
data["per_kwh"] = format_cents_to_dollars(interval.per_kwh)
if interval.channel_type == ChannelType.FEEDIN:
if interval.channel_type == ChannelType.FEED_IN:
data["per_kwh"] = data["per_kwh"] * -1
data["nem_date"] = interval.nem_time.isoformat()
data["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh)

@@ -120,7 +120,7 @@ class AmberForecastSensor(AmberSensor):
return None
interval = intervals[0]

if interval.channel_type == ChannelType.FEEDIN:
if interval.channel_type == ChannelType.FEED_IN:
return format_cents_to_dollars(interval.per_kwh) * -1
return format_cents_to_dollars(interval.per_kwh)

@@ -142,10 +142,10 @@ class AmberForecastSensor(AmberSensor):
for interval in intervals:
datum = {}
datum["duration"] = interval.duration
datum["date"] = interval.var_date.isoformat()
datum["date"] = interval.date.isoformat()
datum["nem_date"] = interval.nem_time.isoformat()
datum["per_kwh"] = format_cents_to_dollars(interval.per_kwh)
if interval.channel_type == ChannelType.FEEDIN:
if interval.channel_type == ChannelType.FEED_IN:
datum["per_kwh"] = datum["per_kwh"] * -1
datum["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh)
datum["start_time"] = interval.start_time.isoformat()

@@ -6,6 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/amcrest",
"iot_class": "local_polling",
"loggers": ["amcrest"],
"quality_scale": "legacy",
"requirements": ["amcrest==1.9.8"]
}

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/ampio",
"iot_class": "cloud_polling",
"loggers": ["asmog"],
"quality_scale": "legacy",
"requirements": ["asmog==0.0.6"]
}

@@ -9,7 +9,7 @@
"loggers": ["adb_shell", "androidtv", "pure_python_adb"],
"requirements": [
"adb-shell[async]==0.4.4",
"androidtv[async]==0.0.75",
"androidtv[async]==0.0.73",
"pure-python-adb[async]==0.3.0.dev0"
]
}

@@ -7,6 +7,7 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["androidtvremote2"],
"quality_scale": "platinum",
"requirements": ["androidtvremote2==0.1.2"],
"zeroconf": ["_androidtvremote2._tcp.local."]
}

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/anel_pwrctrl",
"iot_class": "local_polling",
"loggers": ["anel_pwrctrl"],
"quality_scale": "legacy",
"requirements": ["anel-pwrctrl-homeassistant==0.0.1.dev2"]
}

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aosmith",
"iot_class": "cloud_polling",
"requirements": ["py-aosmith==1.0.11"]
"requirements": ["py-aosmith==1.0.10"]
}

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/apache_kafka",
"iot_class": "local_push",
"loggers": ["aiokafka", "kafka_python"],
"quality_scale": "legacy",
"requirements": ["aiokafka==0.10.0"]
}

@@ -6,5 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/apcupsd",
"iot_class": "local_polling",
"loggers": ["apcaccess"],
"quality_scale": "silver",
"requirements": ["aioapcaccess==0.4.2"]
}

@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/apple_tv",
"iot_class": "local_push",
"loggers": ["pyatv", "srptools"],
"requirements": ["pyatv==0.16.0"],
"requirements": ["pyatv==0.15.1"],
"zeroconf": [
"_mediaremotetv._tcp.local.",
"_companion-link._tcp.local.",

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/apprise",
"iot_class": "cloud_push",
"loggers": ["apprise"],
"quality_scale": "legacy",
"requirements": ["apprise==1.9.0"]
}

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/aprs",
"iot_class": "cloud_push",
"loggers": ["aprslib", "geographiclib", "geopy"],
"quality_scale": "legacy",
"requirements": ["aprslib==0.7.2", "geopy==2.3.0"]
}

@@ -38,7 +38,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ApSystemsConfigEntry) ->
ip_address=entry.data[CONF_IP_ADDRESS],
port=entry.data.get(CONF_PORT, DEFAULT_PORT),
timeout=8,
enable_debounce=True,
)
coordinator = ApSystemsDataCoordinator(hass, api)
await coordinator.async_config_entry_first_refresh()

@@ -5,17 +5,12 @@ from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta

from APsystemsEZ1 import (
APsystemsEZ1M,
InverterReturnedError,
ReturnAlarmInfo,
ReturnOutputData,
)
from APsystemsEZ1 import APsystemsEZ1M, ReturnAlarmInfo, ReturnOutputData

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, LOGGER
from .const import LOGGER

@dataclass

@@ -48,11 +43,6 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
self.api.min_power = device_info.minPower

async def _async_update_data(self) -> ApSystemsSensorData:
try:
output_data = await self.api.get_output_data()
alarm_info = await self.api.get_alarm_info()
except InverterReturnedError:
raise UpdateFailed(
translation_domain=DOMAIN, translation_key="inverter_error"
) from None
output_data = await self.api.get_output_data()
alarm_info = await self.api.get_alarm_info()
return ApSystemsSensorData(output_data=output_data, alarm_info=alarm_info)
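
The coordinator hunk above converts a library error into UpdateFailed with a translation key, so the entities go unavailable with a translatable message. A minimal stand-alone sketch of that mapping, with stand-ins for the APsystemsEZ1 API and Home Assistant's UpdateFailed:

# Stand-alone sketch: both exception classes and fetch_output_data are stand-ins,
# not the APsystemsEZ1 or Home Assistant implementations.
class InverterReturnedError(Exception):
    """Raised by the (stand-in) inverter library."""


class UpdateFailed(Exception):
    """Simplified stand-in for homeassistant.helpers.update_coordinator.UpdateFailed."""


def fetch_output_data(fail: bool = False) -> dict[str, float]:
    if fail:
        raise InverterReturnedError("inverter rejected the request")
    return {"power": 123.0}


def update(fail: bool = False) -> dict[str, float]:
    try:
        return fetch_output_data(fail)
    except InverterReturnedError:
        # "from None" hides the library traceback, as in the hunk above.
        raise UpdateFailed("inverter_error") from None


print(update())      # {'power': 123.0}
# update(fail=True)  # raises UpdateFailed("inverter_error")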

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/apsystems",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["apsystems-ez1==2.4.0"]
"requirements": ["apsystems-ez1==2.2.1"]
}

@@ -72,10 +72,5 @@
"name": "Inverter status"
}
}
},
"exceptions": {
"inverter_error": {
"message": "Inverter returned an error"
}
}
}

@@ -5,7 +5,6 @@ from __future__ import annotations
from typing import Any

from aiohttp.client_exceptions import ClientConnectionError
from APsystemsEZ1 import InverterReturnedError

from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
from homeassistant.core import HomeAssistant

@@ -41,7 +40,7 @@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity):
"""Update switch status and availability."""
try:
status = await self._api.get_device_power_status()
except (TimeoutError, ClientConnectionError, InverterReturnedError):
except (TimeoutError, ClientConnectionError):
self._attr_available = False
else:
self._attr_available = True

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/aqualogic",
"iot_class": "local_push",
"loggers": ["aqualogic"],
"quality_scale": "legacy",
"requirements": ["aqualogic==2.6"]
}

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/aquostv",
"iot_class": "local_polling",
"loggers": ["sharp_aquos_rc"],
"quality_scale": "legacy",
"requirements": ["sharp_aquos_rc==0.3.2"]
}

@@ -3,6 +3,5 @@
"name": "aREST",
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/arest",
"iot_class": "local_polling",
"quality_scale": "legacy"
"iot_class": "local_polling"
}

@@ -6,6 +6,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["arris_tg2492lg"],
"quality_scale": "legacy",
"requirements": ["arris-tg2492lg==2.2.0"]
}

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/aruba",
"iot_class": "local_polling",
"loggers": ["pexpect", "ptyprocess"],
"quality_scale": "legacy",
"requirements": ["pexpect==4.6.0"]
}

@@ -4,6 +4,5 @@
"codeowners": [],
"dependencies": ["mqtt"],
"documentation": "https://www.home-assistant.io/integrations/arwn",
"iot_class": "local_polling",
"quality_scale": "legacy"
"iot_class": "local_polling"
}

@@ -31,7 +31,6 @@ from homeassistant.components.tts import (
)
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import intent
from homeassistant.helpers.collection import (
CHANGE_UPDATED,
CollectionError,

@@ -110,7 +109,6 @@ PIPELINE_FIELDS: VolDictType = {
vol.Required("tts_voice"): vol.Any(str, None),
vol.Required("wake_word_entity"): vol.Any(str, None),
vol.Required("wake_word_id"): vol.Any(str, None),
vol.Optional("prefer_local_intents"): bool,
}

STORED_PIPELINE_RUNS = 10

@@ -324,7 +322,6 @@ async def async_update_pipeline(
tts_voice: str | None | UndefinedType = UNDEFINED,
wake_word_entity: str | None | UndefinedType = UNDEFINED,
wake_word_id: str | None | UndefinedType = UNDEFINED,
prefer_local_intents: bool | UndefinedType = UNDEFINED,
) -> None:
"""Update a pipeline."""
pipeline_data: PipelineData = hass.data[DOMAIN]

@@ -348,7 +345,6 @@ async def async_update_pipeline(
("tts_voice", tts_voice),
("wake_word_entity", wake_word_entity),
("wake_word_id", wake_word_id),
("prefer_local_intents", prefer_local_intents),
)
if val is not UNDEFINED
}

@@ -402,7 +398,6 @@ class Pipeline:
tts_voice: str | None
wake_word_entity: str | None
wake_word_id: str | None
prefer_local_intents: bool = False

id: str = field(default_factory=ulid_util.ulid_now)

@@ -426,7 +421,6 @@ class Pipeline:
tts_voice=data["tts_voice"],
wake_word_entity=data["wake_word_entity"],
wake_word_id=data["wake_word_id"],
prefer_local_intents=data.get("prefer_local_intents", False),
)

def to_json(self) -> dict[str, Any]:

@@ -444,7 +438,6 @@ class Pipeline:
"tts_voice": self.tts_voice,
"wake_word_entity": self.wake_word_entity,
"wake_word_id": self.wake_word_id,
"prefer_local_intents": self.prefer_local_intents,
}

@@ -1018,64 +1011,20 @@ class PipelineRun:
"intent_input": intent_input,
"conversation_id": conversation_id,
"device_id": device_id,
"prefer_local_intents": self.pipeline.prefer_local_intents,
},
)
)

try:
user_input = conversation.ConversationInput(
conversation_result = await conversation.async_converse(
hass=self.hass,
text=intent_input,
context=self.context,
conversation_id=conversation_id,
device_id=device_id,
language=self.pipeline.language,
context=self.context,
language=self.pipeline.conversation_language,
agent_id=self.intent_agent,
)
processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT

conversation_result: conversation.ConversationResult | None = None
if user_input.agent_id != conversation.HOME_ASSISTANT_AGENT:
# Sentence triggers override conversation agent
if (
trigger_response_text
:= await conversation.async_handle_sentence_triggers(
self.hass, user_input
)
) is not None:
# Sentence trigger matched
trigger_response = intent.IntentResponse(
self.pipeline.conversation_language
)
trigger_response.async_set_speech(trigger_response_text)
conversation_result = conversation.ConversationResult(
response=trigger_response,
conversation_id=user_input.conversation_id,
)
# Try local intents first, if preferred.
elif self.pipeline.prefer_local_intents and (
intent_response := await conversation.async_handle_intents(
self.hass, user_input
)
):
# Local intent matched
conversation_result = conversation.ConversationResult(
response=intent_response,
conversation_id=user_input.conversation_id,
)
processed_locally = True

if conversation_result is None:
# Fall back to pipeline conversation agent
conversation_result = await conversation.async_converse(
hass=self.hass,
text=user_input.text,
conversation_id=user_input.conversation_id,
device_id=user_input.device_id,
context=user_input.context,
language=user_input.language,
agent_id=user_input.agent_id,
)
except Exception as src_error:
_LOGGER.exception("Unexpected error during intent recognition")
raise IntentRecognitionError(

@@ -1088,10 +1037,7 @@ class PipelineRun:
self.process_event(
PipelineEvent(
PipelineEventType.INTENT_END,
{
"processed_locally": processed_locally,
"intent_output": conversation_result.as_dict(),
},
{"intent_output": conversation_result.as_dict()},
)
)
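
The PipelineRun hunk above introduces a fallback chain for intent recognition: a matching sentence trigger wins outright, local intents are tried next when prefer_local_intents is set, and only then is the configured conversation agent asked. The skeleton below mirrors that control flow with stub coroutines; it is a simplified sketch, not the Home Assistant implementation (which also gates the first two steps on the agent not being the built-in Home Assistant agent).

# Simplified skeleton of the fallback chain; all helpers below are stubs.
import asyncio


async def handle_sentence_triggers(text: str) -> str | None:
    return "Turned on the light" if text == "turn on the light" else None


async def handle_local_intents(text: str) -> str | None:
    return None


async def converse_with_agent(text: str) -> str:
    return f"agent reply to: {text}"


async def recognize(text: str, prefer_local_intents: bool) -> tuple[str, bool]:
    """Return (response, processed_locally)."""
    # Sentence triggers override the conversation agent.
    if (trigger_response := await handle_sentence_triggers(text)) is not None:
        return trigger_response, True
    # Try local intents first, if preferred.
    if prefer_local_intents and (local := await handle_local_intents(text)) is not None:
        return local, True
    # Fall back to the configured conversation agent.
    return await converse_with_agent(text), False


print(asyncio.run(recognize("turn on the light", prefer_local_intents=True)))
# ('Turned on the light', True)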

@@ -4,6 +4,5 @@
"codeowners": ["@mtdcr"],
"documentation": "https://www.home-assistant.io/integrations/aten_pe",
"iot_class": "local_polling",
"quality_scale": "legacy",
"requirements": ["atenpdu==0.3.2"]
}

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/atome",
"iot_class": "cloud_polling",
"loggers": ["pyatome"],
"quality_scale": "legacy",
"requirements": ["pyAtome==0.1.1"]
}

@@ -28,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.1"]
"requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.0"]
}

@@ -22,14 +22,13 @@ class AussieBroadbandConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1

_reauth_username: str

def __init__(self) -> None:
"""Initialize the config flow."""
self.data: dict = {}
self.options: dict = {CONF_SERVICES: []}
self.services: list[dict[str, Any]] = []
self.client: AussieBB | None = None
self._reauth_username: str | None = None

async def async_auth(self, user_input: dict[str, str]) -> dict[str, str] | None:
"""Reusable Auth Helper."""

@@ -93,7 +92,7 @@ class AussieBroadbandConfigFlow(ConfigFlow, domain=DOMAIN):
errors: dict[str, str] | None = None

if user_input:
if user_input and self._reauth_username:
data = {
CONF_USERNAME: self._reauth_username,
CONF_PASSWORD: user_input[CONF_PASSWORD],

@@ -1,99 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
This integration does not provide additional actions.
appropriate-polling: done
brands: done
common-modules:
status: todo
comment: |
The entity.py file is not used in this integration.
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration does not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done

# Silver
action-exceptions:
status: exempt
comment: |
This integration does not provide additional actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
This integration does not have an options flow.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates:
status: exempt
comment: |
This integration only polls data using a coordinator.
Since the integration is read-only and poll-only (only provide sensor
data), there is no need to implement parallel updates.
reauthentication-flow: todo
test-coverage: done

# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: |
This integration cannot be discovered, it is a connecting to a service
provider, which uses the users home address to get the data.
discovery:
status: exempt
comment: |
This integration cannot be discovered, it is a connecting to a service
provider, which uses the users home address to get the data.
docs-data-update: done
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices:
status: exempt
comment: |
This is an service, which doesn't integrate with any devices.
docs-supported-functions: done
docs-troubleshooting: todo
docs-use-cases: done
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: |
This integration does not have any entities that should disabled by default.
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |
This integration doesn't have any cases where raising an issue is needed.
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/avea",
"iot_class": "local_polling",
"loggers": ["avea"],
"quality_scale": "legacy",
"requirements": ["avea==1.5.1"]
}

@@ -4,6 +4,5 @@
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/avion",
"iot_class": "assumed_state",
"quality_scale": "legacy",
"requirements": ["avion==0.10"]
}

@@ -14,4 +14,7 @@ class AWSFlowHandler(ConfigFlow, domain=DOMAIN):
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
"""Import a config entry."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")

return self.async_create_entry(title="configuration.yaml", data=import_data)

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/aws",
"iot_class": "cloud_push",
"loggers": ["aiobotocore", "botocore"],
"quality_scale": "legacy",
"requirements": ["aiobotocore==2.13.1", "botocore==1.34.131"]
}

@@ -29,6 +29,7 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["axis"],
"quality_scale": "platinum",
"requirements": ["axis==63"],
"ssdp": [
{

@@ -102,6 +102,8 @@ class AEHConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial user step."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
if user_input is None:
return self.async_show_form(step_id=STEP_USER, data_schema=BASE_SCHEMA)

@@ -158,6 +160,8 @@ class AEHConfigFlow(ConfigFlow, domain=DOMAIN):
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
"""Import config from configuration.yaml."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
if CONF_SEND_INTERVAL in import_data:
self._options[CONF_SEND_INTERVAL] = import_data.pop(CONF_SEND_INTERVAL)
if CONF_MAX_DELAY in import_data:

@@ -6,6 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/azure_event_hub",
"iot_class": "cloud_push",
"loggers": ["azure"],
"requirements": ["azure-eventhub==5.11.1"],
"single_config_entry": true
"requirements": ["azure-eventhub==5.11.1"]
}

@@ -31,6 +31,7 @@
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]",
"cannot_connect": "Connecting with the credentials from the configuration.yaml failed, please remove from yaml and use the config flow.",
"unknown": "Connecting with the credentials from the configuration.yaml failed with an unknown error, please remove from yaml and use the config flow."
}

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/azure_service_bus",
"iot_class": "cloud_push",
"loggers": ["azure"],
"quality_scale": "legacy",
"requirements": ["azure-servicebus==7.10.0"]
}

@@ -2,26 +2,23 @@
from __future__ import annotations

import asyncio
from http import HTTPStatus
from typing import cast

from aiohttp import BodyPartReader
from aiohttp.hdrs import CONTENT_DISPOSITION
from aiohttp.web import FileResponse, Request, Response

from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
from homeassistant.components.http import KEY_HASS, HomeAssistantView
from homeassistant.core import HomeAssistant, callback
from homeassistant.util import slugify

from .const import DATA_MANAGER
from .const import DOMAIN
from .manager import BaseBackupManager

@callback
def async_register_http_views(hass: HomeAssistant) -> None:
"""Register the http views."""
hass.http.register_view(DownloadBackupView)
hass.http.register_view(UploadBackupView)

class DownloadBackupView(HomeAssistantView):

@@ -39,7 +36,7 @@ class DownloadBackupView(HomeAssistantView):
if not request["hass_user"].is_admin:
return Response(status=HTTPStatus.UNAUTHORIZED)

manager = request.app[KEY_HASS].data[DATA_MANAGER]
manager: BaseBackupManager = request.app[KEY_HASS].data[DOMAIN]
backup = await manager.async_get_backup(slug=slug)

if backup is None or not backup.path.exists():

@@ -51,29 +48,3 @@ class DownloadBackupView(HomeAssistantView):
CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
},
)

class UploadBackupView(HomeAssistantView):
"""Generate backup view."""

url = "/api/backup/upload"
name = "api:backup:upload"

@require_admin
async def post(self, request: Request) -> Response:
"""Upload a backup file."""
manager = request.app[KEY_HASS].data[DATA_MANAGER]
reader = await request.multipart()
contents = cast(BodyPartReader, await reader.next())

try:
await manager.async_receive_backup(contents=contents)
except OSError as err:
return Response(
body=f"Can't write backup file {err}",
status=HTTPStatus.INTERNAL_SERVER_ERROR,
)
except asyncio.CancelledError:
return Response(status=HTTPStatus.INTERNAL_SERVER_ERROR)

return Response(status=HTTPStatus.CREATED)
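
UploadBackupView above accepts a multipart POST on /api/backup/upload and hands the first body part to the backup manager. A hedged client-side sketch using aiohttp follows; the host, port, field name, and long-lived access token are assumptions for illustration, and a 201 status corresponds to the CREATED response returned by the view.

# Client-side sketch only; host, port, and token are placeholders.
import asyncio

import aiohttp


async def upload_backup(path: str, token: str) -> int:
    """POST a backup tarball to the upload view sketched above."""
    form = aiohttp.FormData()
    with open(path, "rb") as tar_file:
        form.add_field("file", tar_file, filename="backup.tar", content_type="application/x-tar")
        async with aiohttp.ClientSession() as session:
            async with session.post(
                "http://localhost:8123/api/backup/upload",  # assumed host/port
                data=form,
                headers={"Authorization": f"Bearer {token}"},  # assumed long-lived token
            ) as resp:
                return resp.status  # expect 201 (CREATED) on success


# print(asyncio.run(upload_backup("backup.tar", "YOUR_TOKEN")))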

@@ -9,15 +9,11 @@ import hashlib
import io
import json
from pathlib import Path
from queue import SimpleQueue
import shutil
import tarfile
from tarfile import TarError
from tempfile import TemporaryDirectory
import time
from typing import Any, Protocol, cast

import aiohttp
from securetar import SecureTarFile, atomic_contents_add

from homeassistant.backup_restore import RESTORE_BACKUP_FILE

@@ -151,15 +147,6 @@ class BaseBackupManager(abc.ABC):
async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None:
"""Remove a backup."""

@abc.abstractmethod
async def async_receive_backup(
self,
*,
contents: aiohttp.BodyPartReader,
**kwargs: Any,
) -> None:
"""Receive and store a backup file from upload."""

class BackupManager(BaseBackupManager):
"""Backup manager for the Backup integration."""

@@ -235,63 +222,6 @@ class BackupManager(BaseBackupManager):
LOGGER.debug("Removed backup located at %s", backup.path)
self.backups.pop(slug)

async def async_receive_backup(
self,
*,
contents: aiohttp.BodyPartReader,
**kwargs: Any,
) -> None:
"""Receive and store a backup file from upload."""
queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = (
SimpleQueue()
)
temp_dir_handler = await self.hass.async_add_executor_job(TemporaryDirectory)
target_temp_file = Path(
temp_dir_handler.name, contents.filename or "backup.tar"
)

def _sync_queue_consumer() -> None:
with target_temp_file.open("wb") as file_handle:
while True:
if (_chunk_future := queue.get()) is None:
break
_chunk, _future = _chunk_future
if _future is not None:
self.hass.loop.call_soon_threadsafe(_future.set_result, None)
file_handle.write(_chunk)

fut: asyncio.Future[None] | None = None
try:
fut = self.hass.async_add_executor_job(_sync_queue_consumer)
megabytes_sending = 0
while chunk := await contents.read_chunk(BUF_SIZE):
megabytes_sending += 1
if megabytes_sending % 5 != 0:
queue.put_nowait((chunk, None))
continue

chunk_future = self.hass.loop.create_future()
queue.put_nowait((chunk, chunk_future))
await asyncio.wait(
(fut, chunk_future),
return_when=asyncio.FIRST_COMPLETED,
)
if fut.done():
# The executor job failed
break

queue.put_nowait(None) # terminate queue consumer
finally:
if fut is not None:
await fut

def _move_and_cleanup() -> None:
shutil.move(target_temp_file, self.backup_dir / target_temp_file.name)
temp_dir_handler.cleanup()

await self.hass.async_add_executor_job(_move_and_cleanup)
await self.load_backups()

async def async_create_backup(self, **kwargs: Any) -> Backup:
"""Generate a backup."""
if self.backing_up:
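
async_receive_backup above streams the uploaded body into a temporary file: the event loop produces chunks into a SimpleQueue, a worker thread consumes and writes them, and every fifth chunk carries a future so the producer waits for the writer to drain (simple backpressure). The self-contained sketch below reproduces that pattern outside Home Assistant; the chunk source, file name, and error handling are simplified stand-ins.

# Self-contained sketch of the producer/consumer streaming pattern used above.
import asyncio
from pathlib import Path
from queue import SimpleQueue
from tempfile import TemporaryDirectory


async def stream_to_file(chunks: list[bytes], target: Path) -> None:
    queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = SimpleQueue()
    loop = asyncio.get_running_loop()

    def writer() -> None:
        # Runs in a thread: drain the queue until the None sentinel arrives.
        with target.open("wb") as file_handle:
            while (item := queue.get()) is not None:
                chunk, future = item
                if future is not None:
                    loop.call_soon_threadsafe(future.set_result, None)
                file_handle.write(chunk)

    writer_task = loop.run_in_executor(None, writer)
    try:
        for count, chunk in enumerate(chunks, start=1):
            if count % 5 != 0:
                queue.put_nowait((chunk, None))
                continue
            ack = loop.create_future()
            queue.put_nowait((chunk, ack))
            await ack  # wait until the writer has caught up to this chunk
    finally:
        queue.put_nowait(None)  # terminate the writer
        await writer_task


with TemporaryDirectory() as tmp:
    path = Path(tmp, "backup.tar")
    asyncio.run(stream_to_file([b"x" * 1024] * 12, path))
    print(path.stat().st_size)  # 12288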

@@ -7,5 +7,5 @@
"integration_type": "system",
"iot_class": "calculated",
"quality_scale": "internal",
"requirements": ["securetar==2024.11.0"]
"requirements": ["securetar==2024.2.1"]
}

@@ -5,6 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/baidu",
"iot_class": "cloud_push",
"loggers": ["aip"],
"quality_scale": "legacy",
"requirements": ["baidu-aip==1.6.6"]
}

@@ -1,40 +0,0 @@
"""Support for Bang & Olufsen diagnostics."""

from __future__ import annotations

from typing import TYPE_CHECKING, Any

from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
from homeassistant.core import HomeAssistant
import homeassistant.helpers.entity_registry as er

from . import BangOlufsenConfigEntry
from .const import DOMAIN

async def async_get_config_entry_diagnostics(
hass: HomeAssistant, config_entry: BangOlufsenConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""

data: dict = {
"config_entry": config_entry.as_dict(),
"websocket_connected": config_entry.runtime_data.client.websocket_connected,
}

if TYPE_CHECKING:
assert config_entry.unique_id

# Add media_player entity's state
entity_registry = er.async_get(hass)
if entity_id := entity_registry.async_get_entity_id(
MEDIA_PLAYER_DOMAIN, DOMAIN, config_entry.unique_id
):
if state := hass.states.get(entity_id):
state_dict = dict(state.as_dict())

# Remove context as it is not relevant
state_dict.pop("context")
data["media_player"] = state_dict

return data

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/bang_olufsen",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["mozart-api==4.1.1.116.3"],
"requirements": ["mozart-api==4.1.1.116.0"],
"zeroconf": ["_bangolufsen._tcp.local."]
}
Some files were not shown because too many files have changed in this diff.