Mirror of https://github.com/home-assistant/core.git (synced 2025-11-24 10:16:58 +00:00)

Compare commits: 20 commits (adguard/ad ... get_trigge)
| Author | SHA1 | Date |
|---|---|---|
| | ae7153850e | |
| | ae3c744fee | |
| | 745beb1187 | |
| | 542012bb64 | |
| | 35d694ae22 | |
| | 1a796a0ca3 | |
| | 3f21183f93 | |
| | 44070b5331 | |
| | 8d3e0f6cb2 | |
| | d71d93946c | |
| | c52721e56a | |
| | 7cfd3e788d | |
| | 69b3e2585f | |
| | ce13b485f6 | |
| | 06b10c0d05 | |
| | 5045823583 | |
| | 462ad0c010 | |
| | c0aa463468 | |
| | a5a0f0d29c | |
| | 4c63435aaf | |
.github/workflows/builder.yml (vendored, 18 lines changed)

@@ -27,7 +27,7 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
with:
fetch-depth: 0

@@ -88,9 +88,13 @@ jobs:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
exclude:
- arch: armv7
- arch: armhf
- arch: i386
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'

@@ -223,7 +227,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Set build additional args
run: |

@@ -261,7 +265,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master

@@ -305,7 +309,7 @@ jobs:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0

@@ -414,7 +418,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0

@@ -459,7 +463,7 @@ jobs:
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
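The only change repeated across these hunks is the pinned actions/checkout revision; every action is referenced by a full commit SHA with the release tag kept as a trailing comment. A small illustrative script (not part of the repository) that flags any `uses:` reference not pinned this way:

```python
# Illustrative helper, not from the repository: report workflow action
# references that are not pinned to a full 40-character commit SHA.
import re
import sys

ANY_USES = re.compile(r"uses:\s*(\S+)")
PINNED = re.compile(r"@[0-9a-f]{40}\b")


def unpinned_actions(path: str) -> list[str]:
    """Return action references in a workflow file that lack a SHA pin."""
    bad: list[str] = []
    with open(path, encoding="utf-8") as handle:
        for line in handle:
            match = ANY_USES.search(line)
            if not match:
                continue
            ref = match.group(1)
            if ref.startswith("*"):
                # YAML alias; the anchored step is checked where it is defined.
                continue
            if not PINNED.search(line):
                bad.append(ref)
    return bad


if __name__ == "__main__":
    for ref in unpinned_actions(sys.argv[1]):
        print(f"not pinned to a commit SHA: {ref}")
```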
.github/workflows/ci.yaml (vendored, 2 lines changed)

@@ -99,7 +99,7 @@ jobs:
steps:
- &checkout
name: Check out code from GitHub
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: |
.github/workflows/codeql.yml (vendored, 2 lines changed)

@@ -21,7 +21,7 @@ jobs:

steps:
- name: Check out code from GitHub
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Initialize CodeQL
uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
.github/workflows/translations.yml (vendored, 2 lines changed)

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
.github/workflows/wheels.yml (vendored, 40 lines changed)

@@ -33,7 +33,7 @@ jobs:
steps:
- &checkout
name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python

@@ -77,8 +77,20 @@ jobs:

# Use C-Extension for SQLAlchemy
echo "REQUIRE_SQLALCHEMY_CEXT=1"

# Add additional pip wheel build constraints
echo "PIP_CONSTRAINT=build_constraints.txt"
) > .env_file

- name: Write pip wheel build constraints
run: |
(
# ninja 1.11.1.2 + 1.11.1.3 seem to be broken on at least armhf
# this caused the numpy builds to fail
# https://github.com/scikit-build/ninja-python-distributions/issues/274
echo "ninja==1.11.1.1"
) > build_constraints.txt

- name: Upload env_file
uses: &actions-upload-artifact actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
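The two steps above work together: `.env_file` exports `PIP_CONSTRAINT=build_constraints.txt`, and the constraints file pins ninja to 1.11.1.1 so the broken 1.11.1.2/1.11.1.3 releases never enter a numpy build. A minimal sketch of the same mechanism run locally (the numpy source build is only an example target; the file name reuses the workflow's):

```python
# Minimal local sketch of the CI constraint mechanism above (example target:
# building numpy from source). PIP_CONSTRAINT points pip at a constraints
# file, so ninja is pinned without listing it in the requirements themselves.
import os
import subprocess

with open("build_constraints.txt", "w", encoding="utf-8") as handle:
    handle.write("ninja==1.11.1.1\n")  # 1.11.1.2 / 1.11.1.3 broke armhf numpy builds

env = os.environ | {"PIP_CONSTRAINT": "build_constraints.txt"}
subprocess.run(
    ["pip", "install", "--no-binary", "numpy", "numpy"],
    env=env,
    check=True,
)
```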
@@ -87,6 +99,13 @@ jobs:
include-hidden-files: true
overwrite: true

- name: Upload build_constraints
uses: *actions-upload-artifact
with:
name: build_constraints
path: ./build_constraints.txt
overwrite: true

- name: Upload requirements_diff
uses: *actions-upload-artifact
with:

@@ -119,6 +138,13 @@ jobs:
- os: ubuntu-latest
- arch: aarch64
os: ubuntu-24.04-arm
exclude:
- abi: cp314
arch: armv7
- abi: cp314
arch: armhf
- abi: cp314
arch: i386
steps:
- *checkout

@@ -128,6 +154,12 @@ jobs:
with:
name: env_file

- &download-build-constraints
name: Download build_constraints
uses: *actions-download-artifact
with:
name: build_constraints

- &download-requirements-diff
name: Download requirements_diff
uses: *actions-download-artifact

@@ -167,7 +199,7 @@ jobs:
- *checkout

- *download-env-file

- *download-build-constraints
- *download-requirements-diff

- name: Download requirements_all_wheels

@@ -177,6 +209,10 @@ jobs:

- name: Adjust build env
run: |
if [ "${{ matrix.arch }}" = "i386" ]; then
echo "NPY_DISABLE_SVML=1" >> .env_file
fi

# Do not pin numpy in wheels building
sed -i "/numpy/d" homeassistant/package_constraints.txt
# Don't build wheels for uv as uv requires a greater version of rust as currently available on alpine
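The wheels workflow leans heavily on YAML anchors and aliases: steps defined once as `&checkout`, `&actions-upload-artifact`, `&download-build-constraints`, or `&download-requirements-diff` are reused later as `*checkout`, `*actions-upload-artifact`, and so on. A short PyYAML sketch (illustrative document, not the real workflow) showing that an alias resolves to the same mapping as its anchor:

```python
# Illustrative only: how a YAML alias expands to its anchored mapping when the
# document is parsed, mirroring the &checkout / *checkout reuse in wheels.yml.
import yaml

DOC = """
steps:
  - &checkout
    name: Checkout the repository
    uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd  # v5.0.1
  - *checkout
"""

steps = yaml.safe_load(DOC)["steps"]
assert steps[0] == steps[1]  # the alias repeats the whole step definition
print(steps[1]["uses"])
```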
CODEOWNERS (generated, 6 lines changed)

@@ -69,8 +69,6 @@ build.json @home-assistant/supervisor
/tests/components/airly/ @bieniu
/homeassistant/components/airnow/ @asymworks
/tests/components/airnow/ @asymworks
/homeassistant/components/airobot/ @mettolen
/tests/components/airobot/ @mettolen
/homeassistant/components/airos/ @CoMPaTech
/tests/components/airos/ @CoMPaTech
/homeassistant/components/airq/ @Sibgatulin @dl2080

@@ -629,8 +627,6 @@ build.json @home-assistant/supervisor
/tests/components/guardian/ @bachya
/homeassistant/components/habitica/ @tr4nt0r
/tests/components/habitica/ @tr4nt0r
/homeassistant/components/hanna/ @bestycame
/tests/components/hanna/ @bestycame
/homeassistant/components/hardkernel/ @home-assistant/core
/tests/components/hardkernel/ @home-assistant/core
/homeassistant/components/hardware/ @home-assistant/core

@@ -850,8 +846,6 @@ build.json @home-assistant/supervisor
/tests/components/kraken/ @eifinger
/homeassistant/components/kulersky/ @emlove
/tests/components/kulersky/ @emlove
/homeassistant/components/labs/ @home-assistant/core
/tests/components/labs/ @home-assistant/core
/homeassistant/components/lacrosse_view/ @IceBotYT
/tests/components/lacrosse_view/ @IceBotYT
/homeassistant/components/lamarzocco/ @zweckj
Dockerfile (generated, 4 lines changed)

@@ -21,9 +21,11 @@ ARG BUILD_ARCH
RUN \
case "${BUILD_ARCH}" in \
"aarch64") go2rtc_suffix='arm64' ;; \
"armhf") go2rtc_suffix='armv6' ;; \
"armv7") go2rtc_suffix='arm' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.12/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.11/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
&& go2rtc --version
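The case statement maps the build architecture to the go2rtc release asset suffix, falling back to the architecture name itself. Restated as a small Python mapping for reference (the version strings follow the two pins shown in the Dockerfile hunk):

```python
# Restatement of the BUILD_ARCH -> go2rtc asset mapping in the Dockerfile case
# statement; unknown architectures fall through to their own name.
GO2RTC_SUFFIX = {"aarch64": "arm64", "armhf": "armv6", "armv7": "arm"}


def go2rtc_asset_url(build_arch: str, version: str = "1.9.12") -> str:
    suffix = GO2RTC_SUFFIX.get(build_arch, build_arch)
    return (
        "https://github.com/AlexxIT/go2rtc/releases/download/"
        f"v{version}/go2rtc_linux_{suffix}"
    )


assert go2rtc_asset_url("armhf").endswith("go2rtc_linux_armv6")
assert go2rtc_asset_url("amd64", "1.9.11").endswith("go2rtc_linux_amd64")
```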
@@ -1,7 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.11.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.11.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.11.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.11.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.11.0
cosign:
base_identity: https://github.com/home-assistant/docker/.*
identity: https://github.com/home-assistant/core/.*
@@ -176,8 +176,6 @@ FRONTEND_INTEGRATIONS = {
STAGE_0_INTEGRATIONS = (
# Load logging and http deps as soon as possible
("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS, None),
# Setup labs for preview features
("labs", {"labs"}, STAGE_0_SUBSTAGE_TIMEOUT),
# Setup frontend
("frontend", FRONTEND_INTEGRATIONS, None),
# Setup recorder

@@ -214,7 +212,6 @@ DEFAULT_INTEGRATIONS = {
"backup",
"frontend",
"hardware",
"labs",
"logger",
"network",
"system_health",
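Each entry in STAGE_0_INTEGRATIONS above is a (label, domains, timeout) tuple, and the hunk covers the `labs` substage with its own STAGE_0_SUBSTAGE_TIMEOUT. A toy sketch of consuming such a staging table (the setup coroutine and timeout value are invented for illustration; this is not the real bootstrap loop):

```python
# Toy illustration of a (label, domains, timeout) staging table like
# STAGE_0_INTEGRATIONS; setup_domain and the 60-second timeout are invented.
import asyncio

STAGE_0 = (
    ("logging, http deps", {"logger", "http"}, None),
    ("labs", {"labs"}, 60),  # per-substage timeout in seconds (assumed unit)
    ("frontend", {"frontend"}, None),
)


async def setup_domain(domain: str) -> None:
    await asyncio.sleep(0)  # stand-in for real per-integration setup


async def run_stage_0() -> None:
    for label, domains, timeout in STAGE_0:
        gathered = asyncio.gather(*(setup_domain(domain) for domain in domains))
        if timeout is None:
            await gathered
        else:
            await asyncio.wait_for(gathered, timeout)
        print(f"substage done: {label}")


asyncio.run(run_stage_0())
```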
@@ -1,10 +1,10 @@
"""The Actron Air integration."""

from actron_neo_api import (
ActronAirACSystem,
ActronAirAPI,
ActronAirAPIError,
ActronAirAuthError,
ActronAirNeoACSystem,
ActronNeoAPI,
ActronNeoAPIError,
ActronNeoAuthError,
)

from homeassistant.const import CONF_API_TOKEN, Platform

@@ -23,16 +23,16 @@ PLATFORM = [Platform.CLIMATE]
async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:
"""Set up Actron Air integration from a config entry."""

api = ActronAirAPI(refresh_token=entry.data[CONF_API_TOKEN])
systems: list[ActronAirACSystem] = []
api = ActronNeoAPI(refresh_token=entry.data[CONF_API_TOKEN])
systems: list[ActronAirNeoACSystem] = []

try:
systems = await api.get_ac_systems()
await api.update_status()
except ActronAirAuthError:
except ActronNeoAuthError:
_LOGGER.error("Authentication error while setting up Actron Air integration")
raise
except ActronAirAPIError as err:
except ActronNeoAPIError as err:
_LOGGER.error("API error while setting up Actron Air integration: %s", err)
raise
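The whole Actron Air portion of this diff is a mechanical rename between actron-neo-api releases: ActronNeoAPI, ActronNeoAuthError, and ActronNeoAPIError on one side versus ActronAirAPI, ActronAirAuthError, and ActronAirAPIError on the other, matching the 0.1.84/0.1.87 pins in the manifest hunk further down. The integration simply switches names along with the pin; the fallback import below is only a hedged illustration of how such a rename can be bridged, not what the integration does:

```python
# Hedged illustration only: tolerate both actron_neo_api spellings during a
# library rename. The integration in this diff pins one version and uses one
# set of names instead.
try:
    from actron_neo_api import ActronAirAPI as ActronAPI
    from actron_neo_api import ActronAirAuthError as ActronAuthError
except ImportError:
    from actron_neo_api import ActronNeoAPI as ActronAPI
    from actron_neo_api import ActronNeoAuthError as ActronAuthError

api = ActronAPI(refresh_token="example-refresh-token")  # placeholder token
```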
@@ -2,7 +2,7 @@
|
||||
|
||||
from typing import Any
|
||||
|
||||
from actron_neo_api import ActronAirStatus, ActronAirZone
|
||||
from actron_neo_api import ActronAirNeoStatus, ActronAirNeoZone
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
FAN_AUTO,
|
||||
@@ -132,7 +132,7 @@ class ActronSystemClimate(BaseClimateEntity):
|
||||
return self._status.max_temp
|
||||
|
||||
@property
|
||||
def _status(self) -> ActronAirStatus:
|
||||
def _status(self) -> ActronAirNeoStatus:
|
||||
"""Get the current status from the coordinator."""
|
||||
return self.coordinator.data
|
||||
|
||||
@@ -194,7 +194,7 @@ class ActronZoneClimate(BaseClimateEntity):
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ActronAirSystemCoordinator,
|
||||
zone: ActronAirZone,
|
||||
zone: ActronAirNeoZone,
|
||||
) -> None:
|
||||
"""Initialize an Actron Air unit."""
|
||||
super().__init__(coordinator, zone.title)
|
||||
@@ -221,7 +221,7 @@ class ActronZoneClimate(BaseClimateEntity):
|
||||
return self._zone.max_temp
|
||||
|
||||
@property
|
||||
def _zone(self) -> ActronAirZone:
|
||||
def _zone(self) -> ActronAirNeoZone:
|
||||
"""Get the current zone data from the coordinator."""
|
||||
status = self.coordinator.data
|
||||
return status.zones[self._zone_id]
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import asyncio
|
||||
from typing import Any
|
||||
|
||||
from actron_neo_api import ActronAirAPI, ActronAirAuthError
|
||||
from actron_neo_api import ActronNeoAPI, ActronNeoAuthError
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_TOKEN
|
||||
@@ -17,7 +17,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self._api: ActronAirAPI | None = None
|
||||
self._api: ActronNeoAPI | None = None
|
||||
self._device_code: str | None = None
|
||||
self._user_code: str = ""
|
||||
self._verification_uri: str = ""
|
||||
@@ -30,10 +30,10 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle the initial step."""
|
||||
if self._api is None:
|
||||
_LOGGER.debug("Initiating device authorization")
|
||||
self._api = ActronAirAPI()
|
||||
self._api = ActronNeoAPI()
|
||||
try:
|
||||
device_code_response = await self._api.request_device_code()
|
||||
except ActronAirAuthError as err:
|
||||
except ActronNeoAuthError as err:
|
||||
_LOGGER.error("OAuth2 flow failed: %s", err)
|
||||
return self.async_abort(reason="oauth2_error")
|
||||
|
||||
@@ -50,7 +50,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
try:
|
||||
await self._api.poll_for_token(self._device_code)
|
||||
_LOGGER.debug("Authorization successful")
|
||||
except ActronAirAuthError as ex:
|
||||
except ActronNeoAuthError as ex:
|
||||
_LOGGER.exception("Error while waiting for device authorization")
|
||||
raise CannotConnect from ex
|
||||
|
||||
@@ -89,7 +89,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
try:
|
||||
user_data = await self._api.get_user_info()
|
||||
except ActronAirAuthError as err:
|
||||
except ActronNeoAuthError as err:
|
||||
_LOGGER.error("Error getting user info: %s", err)
|
||||
return self.async_abort(reason="oauth2_error")
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
|
||||
from actron_neo_api import ActronAirACSystem, ActronAirAPI, ActronAirStatus
|
||||
from actron_neo_api import ActronAirNeoACSystem, ActronAirNeoStatus, ActronNeoAPI
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -23,7 +23,7 @@ ERROR_UNKNOWN = "unknown_error"
|
||||
class ActronAirRuntimeData:
|
||||
"""Runtime data for the Actron Air integration."""
|
||||
|
||||
api: ActronAirAPI
|
||||
api: ActronNeoAPI
|
||||
system_coordinators: dict[str, ActronAirSystemCoordinator]
|
||||
|
||||
|
||||
@@ -33,15 +33,15 @@ AUTH_ERROR_THRESHOLD = 3
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
|
||||
class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirNeoACSystem]):
|
||||
"""System coordinator for Actron Air integration."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: ActronAirConfigEntry,
|
||||
api: ActronAirAPI,
|
||||
system: ActronAirACSystem,
|
||||
api: ActronNeoAPI,
|
||||
system: ActronAirNeoACSystem,
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
@@ -57,7 +57,7 @@ class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
|
||||
self.status = self.api.state_manager.get_status(self.serial_number)
|
||||
self.last_seen = dt_util.utcnow()
|
||||
|
||||
async def _async_update_data(self) -> ActronAirStatus:
|
||||
async def _async_update_data(self) -> ActronAirNeoStatus:
|
||||
"""Fetch updates and merge incremental changes into the full state."""
|
||||
await self.api.update_status()
|
||||
self.status = self.api.state_manager.get_status(self.serial_number)
|
||||
|
||||
@@ -12,5 +12,5 @@
"documentation": "https://www.home-assistant.io/integrations/actron_air",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["actron-neo-api==0.1.87"]
"requirements": ["actron-neo-api==0.1.84"]
}
@@ -1,29 +0,0 @@
|
||||
"""The Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.CLIMATE]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
|
||||
"""Set up Airobot from a config entry."""
|
||||
coordinator = AirobotDataUpdateCoordinator(hass, entry)
|
||||
|
||||
# Fetch initial data so we have data when entities subscribe
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
@@ -1,151 +0,0 @@
|
||||
"""Climate platform for Airobot thermostat."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyairobotrest.const import (
|
||||
MODE_AWAY,
|
||||
MODE_HOME,
|
||||
SETPOINT_TEMP_MAX,
|
||||
SETPOINT_TEMP_MIN,
|
||||
)
|
||||
from pyairobotrest.exceptions import AirobotError
|
||||
from pyairobotrest.models import ThermostatSettings, ThermostatStatus
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
PRESET_AWAY,
|
||||
PRESET_BOOST,
|
||||
PRESET_HOME,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AirobotConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .entity import AirobotEntity
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
_PRESET_MODE_2_MODE = {
|
||||
PRESET_AWAY: MODE_AWAY,
|
||||
PRESET_HOME: MODE_HOME,
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AirobotConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Airobot climate platform."""
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities([AirobotClimate(coordinator)])
|
||||
|
||||
|
||||
class AirobotClimate(AirobotEntity, ClimateEntity):
|
||||
"""Representation of an Airobot thermostat."""
|
||||
|
||||
_attr_name = None
|
||||
_attr_translation_key = "thermostat"
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_hvac_modes = [HVACMode.HEAT]
|
||||
_attr_preset_modes = [PRESET_HOME, PRESET_AWAY, PRESET_BOOST]
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE
|
||||
)
|
||||
_attr_min_temp = SETPOINT_TEMP_MIN
|
||||
_attr_max_temp = SETPOINT_TEMP_MAX
|
||||
|
||||
@property
|
||||
def _status(self) -> ThermostatStatus:
|
||||
"""Get status from coordinator data."""
|
||||
return self.coordinator.data.status
|
||||
|
||||
@property
|
||||
def _settings(self) -> ThermostatSettings:
|
||||
"""Get settings from coordinator data."""
|
||||
return self.coordinator.data.settings
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
return self._status.temp_air
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the target temperature."""
|
||||
if self._settings.is_home_mode:
|
||||
return self._settings.setpoint_temp
|
||||
return self._settings.setpoint_temp_away
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return current HVAC mode."""
|
||||
if self._status.is_heating:
|
||||
return HVACMode.HEAT
|
||||
return HVACMode.OFF
|
||||
|
||||
@property
|
||||
def hvac_action(self) -> HVACAction:
|
||||
"""Return current HVAC action."""
|
||||
if self._status.is_heating:
|
||||
return HVACAction.HEATING
|
||||
return HVACAction.IDLE
|
||||
|
||||
@property
|
||||
def preset_mode(self) -> str | None:
|
||||
"""Return current preset mode."""
|
||||
if self._settings.setting_flags.boost_enabled:
|
||||
return PRESET_BOOST
|
||||
if self._settings.is_home_mode:
|
||||
return PRESET_HOME
|
||||
return PRESET_AWAY
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
temperature = kwargs[ATTR_TEMPERATURE]
|
||||
|
||||
try:
|
||||
if self._settings.is_home_mode:
|
||||
await self.coordinator.client.set_home_temperature(float(temperature))
|
||||
else:
|
||||
await self.coordinator.client.set_away_temperature(float(temperature))
|
||||
except AirobotError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_temperature_failed",
|
||||
translation_placeholders={"temperature": str(temperature)},
|
||||
) from err
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
"""Set new preset mode."""
|
||||
try:
|
||||
if preset_mode == PRESET_BOOST:
|
||||
# Enable boost mode
|
||||
if not self._settings.setting_flags.boost_enabled:
|
||||
await self.coordinator.client.set_boost_mode(True)
|
||||
else:
|
||||
# Disable boost mode if it's enabled
|
||||
if self._settings.setting_flags.boost_enabled:
|
||||
await self.coordinator.client.set_boost_mode(False)
|
||||
|
||||
# Set the mode (HOME or AWAY)
|
||||
await self.coordinator.client.set_mode(_PRESET_MODE_2_MODE[preset_mode])
|
||||
|
||||
except AirobotError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_preset_mode_failed",
|
||||
translation_placeholders={"preset_mode": preset_mode},
|
||||
) from err
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
@@ -1,183 +0,0 @@
|
||||
"""Config flow for the Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyairobotrest import AirobotClient
|
||||
from pyairobotrest.exceptions import (
|
||||
AirobotAuthError,
|
||||
AirobotConnectionError,
|
||||
AirobotError,
|
||||
AirobotTimeoutError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow as BaseConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_USERNAME): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class DeviceInfo:
|
||||
"""Device information."""
|
||||
|
||||
title: str
|
||||
device_id: str
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> DeviceInfo:
|
||||
"""Validate the user input allows us to connect.
|
||||
|
||||
Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
|
||||
"""
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
client = AirobotClient(
|
||||
host=data[CONF_HOST],
|
||||
username=data[CONF_USERNAME],
|
||||
password=data[CONF_PASSWORD],
|
||||
session=session,
|
||||
)
|
||||
|
||||
try:
|
||||
# Try to fetch data to validate connection and authentication
|
||||
status = await client.get_statuses()
|
||||
settings = await client.get_settings()
|
||||
except AirobotAuthError as err:
|
||||
raise InvalidAuth from err
|
||||
except (AirobotConnectionError, AirobotTimeoutError, AirobotError) as err:
|
||||
raise CannotConnect from err
|
||||
|
||||
# Use device name or device ID as title
|
||||
title = settings.device_name or status.device_id
|
||||
|
||||
return DeviceInfo(title=title, device_id=status.device_id)
|
||||
|
||||
|
||||
class AirobotConfigFlow(BaseConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Airobot."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self._discovered_host: str | None = None
|
||||
self._discovered_mac: str | None = None
|
||||
self._discovered_device_id: str | None = None
|
||||
|
||||
async def async_step_dhcp(
|
||||
self, discovery_info: DhcpServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle DHCP discovery."""
|
||||
# Store the discovered IP address and MAC
|
||||
self._discovered_host = discovery_info.ip
|
||||
self._discovered_mac = discovery_info.macaddress
|
||||
|
||||
# Extract device_id from hostname (format: airobot-thermostat-t01xxxxxx)
|
||||
hostname = discovery_info.hostname.lower()
|
||||
device_id = hostname.replace("airobot-thermostat-", "").upper()
|
||||
self._discovered_device_id = device_id
|
||||
# Set unique_id to device_id for duplicate detection
|
||||
await self.async_set_unique_id(device_id)
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.ip})
|
||||
|
||||
# Show the confirmation form
|
||||
return await self.async_step_dhcp_confirm()
|
||||
|
||||
async def async_step_dhcp_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle DHCP discovery confirmation - ask for credentials only."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
# Combine discovered host and device_id with user-provided password
|
||||
data = {
|
||||
CONF_HOST: self._discovered_host,
|
||||
CONF_USERNAME: self._discovered_device_id,
|
||||
CONF_PASSWORD: user_input[CONF_PASSWORD],
|
||||
}
|
||||
|
||||
try:
|
||||
info = await validate_input(self.hass, data)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
# Store MAC address in config entry data
|
||||
if self._discovered_mac:
|
||||
data[CONF_MAC] = self._discovered_mac
|
||||
|
||||
return self.async_create_entry(title=info.title, data=data)
|
||||
|
||||
# Only ask for password since we already have the device_id from discovery
|
||||
return self.async_show_form(
|
||||
step_id="dhcp_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
),
|
||||
description_placeholders={
|
||||
"host": self._discovered_host or "",
|
||||
"device_id": self._discovered_device_id or "",
|
||||
},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
try:
|
||||
info = await validate_input(self.hass, user_input)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
# Use device ID as unique ID to prevent duplicates
|
||||
await self.async_set_unique_id(info.device_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(title=info.title, data=user_input)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
||||
|
||||
class InvalidAuth(HomeAssistantError):
|
||||
"""Error to indicate there is invalid auth."""
|
||||
@@ -1,5 +0,0 @@
"""Constants for the Airobot integration."""

from typing import Final

DOMAIN: Final = "airobot"
@@ -1,59 +0,0 @@
|
||||
"""Coordinator for the Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pyairobotrest import AirobotClient
|
||||
from pyairobotrest.exceptions import AirobotAuthError, AirobotConnectionError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
from .models import AirobotData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Update interval - thermostat measures air every 30 seconds
|
||||
UPDATE_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
type AirobotConfigEntry = ConfigEntry[AirobotDataUpdateCoordinator]
|
||||
|
||||
|
||||
class AirobotDataUpdateCoordinator(DataUpdateCoordinator[AirobotData]):
|
||||
"""Class to manage fetching Airobot data."""
|
||||
|
||||
config_entry: AirobotConfigEntry
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: AirobotConfigEntry) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name=DOMAIN,
|
||||
update_interval=UPDATE_INTERVAL,
|
||||
config_entry=entry,
|
||||
)
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
self.client = AirobotClient(
|
||||
host=entry.data[CONF_HOST],
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
session=session,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> AirobotData:
|
||||
"""Fetch data from API endpoint."""
|
||||
try:
|
||||
status = await self.client.get_statuses()
|
||||
settings = await self.client.get_settings()
|
||||
except (AirobotAuthError, AirobotConnectionError) as err:
|
||||
raise UpdateFailed(f"Failed to communicate with device: {err}") from err
|
||||
|
||||
return AirobotData(status=status, settings=settings)
|
||||
@@ -1,42 +0,0 @@
|
||||
"""Base entity for Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.const import CONF_MAC
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AirobotDataUpdateCoordinator
|
||||
|
||||
|
||||
class AirobotEntity(CoordinatorEntity[AirobotDataUpdateCoordinator]):
|
||||
"""Base class for Airobot entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AirobotDataUpdateCoordinator,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
status = coordinator.data.status
|
||||
settings = coordinator.data.settings
|
||||
|
||||
self._attr_unique_id = status.device_id
|
||||
|
||||
connections = set()
|
||||
if (mac := coordinator.config_entry.data.get(CONF_MAC)) is not None:
|
||||
connections.add((CONNECTION_NETWORK_MAC, mac))
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, status.device_id)},
|
||||
connections=connections,
|
||||
name=settings.device_name or status.device_id,
|
||||
manufacturer="Airobot",
|
||||
model="Thermostat",
|
||||
model_id="TE1",
|
||||
sw_version=str(status.fw_version),
|
||||
hw_version=str(status.hw_version),
|
||||
)
|
||||
@@ -1,17 +0,0 @@
{
"domain": "airobot",
"name": "Airobot",
"codeowners": ["@mettolen"],
"config_flow": true,
"dhcp": [
{
"hostname": "airobot-thermostat-*"
}
],
"documentation": "https://www.home-assistant.io/integrations/airobot",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pyairobotrest"],
"quality_scale": "bronze",
"requirements": ["pyairobotrest==0.1.0"]
}
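The `airobot-thermostat-*` DHCP matcher above pairs with the config flow earlier in this diff, which derives the device ID by stripping that prefix from the discovered hostname and upper-casing the remainder. Restated on its own:

```python
# Restatement of the hostname parsing used by the Airobot DHCP config flow in
# this diff (hostname format: airobot-thermostat-t01xxxxxx).
def device_id_from_hostname(hostname: str) -> str:
    return hostname.lower().replace("airobot-thermostat-", "").upper()


assert device_id_from_hostname("Airobot-Thermostat-T01ABC123") == "T01ABC123"
```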
@@ -1,15 +0,0 @@
|
||||
"""Models for the Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from pyairobotrest.models import ThermostatSettings, ThermostatStatus
|
||||
|
||||
|
||||
@dataclass
|
||||
class AirobotData:
|
||||
"""Data from the Airobot coordinator."""
|
||||
|
||||
status: ThermostatStatus
|
||||
settings: ThermostatSettings
|
||||
@@ -1,70 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: Integration does not use event subscriptions.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: Single device integration, no dynamic device discovery needed.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: todo
|
||||
exception-translations: done
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: Single device integration, no stale device handling needed.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
@@ -1,44 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"dhcp_confirm": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "The thermostat password."
|
||||
},
|
||||
"description": "Airobot thermostat {device_id} discovered at {host}. Enter the password to complete setup. Find the password in the thermostat settings menu under Connectivity → Mobile app."
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your Airobot thermostat.",
|
||||
"password": "The thermostat password.",
|
||||
"username": "The thermostat Device ID (e.g., T01XXXXXX)."
|
||||
},
|
||||
"description": "Enter your Airobot thermostat connection details. Find the Device ID and password in the thermostat settings menu under Connectivity → Mobile app."
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"set_preset_mode_failed": {
|
||||
"message": "Failed to set preset mode to {preset_mode}."
|
||||
},
|
||||
"set_temperature_failed": {
|
||||
"message": "Failed to set temperature to {temperature}."
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -45,7 +45,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
data[CONF_PASSWORD],
)

return await api.login.login_mode_interactive(data[CONF_CODE])
return await api.login_mode_interactive(data[CONF_CODE])


class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):

@@ -59,7 +59,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
async def _async_update_data(self) -> dict[str, AmazonDevice]:
"""Update device data."""
try:
await self.api.login.login_mode_stored_data()
await self.api.login_mode_stored_data()
data = await self.api.get_devices_data()
except CannotConnect as err:
raise UpdateFailed(

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==9.0.2"]
"requirements": ["aioamazondevices==8.0.1"]
}
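Between the two aioamazondevices pins in this hunk (8.0.1 and 9.0.2) the login call moves between `api.login_mode_interactive(...)` and `api.login.login_mode_interactive(...)`, and likewise for the stored-data login in the coordinator. A hedged sketch of calling whichever entry point the installed release exposes (not how the integration handles it; it pins a single version):

```python
# Hedged sketch only: dispatch to whichever aioamazondevices login entry point
# exists, based on the two call shapes visible in this hunk.
async def interactive_login(api, otp_code: str):
    login = getattr(api, "login_mode_interactive", None)
    if login is None:
        # Other release shape: the call lives on a nested login object.
        login = api.login.login_mode_interactive
    return await login(otp_code)
```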
@@ -7,6 +7,7 @@ import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
@@ -29,36 +30,14 @@ __all__ = [
|
||||
"async_devices_payload",
|
||||
]
|
||||
|
||||
CONF_SNAPSHOTS_URL = "snapshots_url"
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_SNAPSHOTS_URL): vol.Any(str, None),
|
||||
}
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
|
||||
DATA_COMPONENT: HassKey[Analytics] = HassKey(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
|
||||
"""Set up the analytics integration."""
|
||||
analytics_config = config.get(DOMAIN, {})
|
||||
|
||||
# For now we want to enable device analytics only if the url option
|
||||
# is explicitly listed in YAML.
|
||||
if CONF_SNAPSHOTS_URL in analytics_config:
|
||||
disable_snapshots = False
|
||||
snapshots_url = analytics_config[CONF_SNAPSHOTS_URL]
|
||||
else:
|
||||
disable_snapshots = True
|
||||
snapshots_url = None
|
||||
|
||||
analytics = Analytics(hass, snapshots_url, disable_snapshots)
|
||||
analytics = Analytics(hass)
|
||||
|
||||
# Load stored data
|
||||
await analytics.load()
|
||||
|
||||
@@ -59,6 +59,9 @@ from homeassistant.loader import (
|
||||
from homeassistant.setup import async_get_loaded_integrations
|
||||
|
||||
from .const import (
|
||||
ANALYTICS_ENDPOINT_URL,
|
||||
ANALYTICS_ENDPOINT_URL_DEV,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
ATTR_ADDON_COUNT,
|
||||
ATTR_ADDONS,
|
||||
ATTR_ARCH,
|
||||
@@ -88,14 +91,10 @@ from .const import (
|
||||
ATTR_USER_COUNT,
|
||||
ATTR_UUID,
|
||||
ATTR_VERSION,
|
||||
BASIC_ENDPOINT_URL,
|
||||
BASIC_ENDPOINT_URL_DEV,
|
||||
DOMAIN,
|
||||
INTERVAL,
|
||||
LOGGER,
|
||||
PREFERENCE_SCHEMA,
|
||||
SNAPSHOT_DEFAULT_URL,
|
||||
SNAPSHOT_URL_PATH,
|
||||
SNAPSHOT_VERSION,
|
||||
STORAGE_KEY,
|
||||
STORAGE_VERSION,
|
||||
@@ -237,18 +236,10 @@ class AnalyticsData:
|
||||
class Analytics:
|
||||
"""Analytics helper class for the analytics integration."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
snapshots_url: str | None = None,
|
||||
disable_snapshots: bool = False,
|
||||
) -> None:
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the Analytics class."""
|
||||
self._hass: HomeAssistant = hass
|
||||
self._snapshots_url = snapshots_url
|
||||
self._disable_snapshots = disable_snapshots
|
||||
|
||||
self._session = async_get_clientsession(hass)
|
||||
self.hass: HomeAssistant = hass
|
||||
self.session = async_get_clientsession(hass)
|
||||
self._data = AnalyticsData(False, {})
|
||||
self._store = Store[dict[str, Any]](hass, STORAGE_VERSION, STORAGE_KEY)
|
||||
self._basic_scheduled: CALLBACK_TYPE | None = None
|
||||
@@ -258,15 +249,13 @@ class Analytics:
|
||||
def preferences(self) -> dict:
|
||||
"""Return the current active preferences."""
|
||||
preferences = self._data.preferences
|
||||
result = {
|
||||
return {
|
||||
ATTR_BASE: preferences.get(ATTR_BASE, False),
|
||||
ATTR_SNAPSHOTS: preferences.get(ATTR_SNAPSHOTS, False),
|
||||
ATTR_DIAGNOSTICS: preferences.get(ATTR_DIAGNOSTICS, False),
|
||||
ATTR_USAGE: preferences.get(ATTR_USAGE, False),
|
||||
ATTR_STATISTICS: preferences.get(ATTR_STATISTICS, False),
|
||||
}
|
||||
if not self._disable_snapshots:
|
||||
result[ATTR_SNAPSHOTS] = preferences.get(ATTR_SNAPSHOTS, False)
|
||||
return result
|
||||
|
||||
@property
|
||||
def onboarded(self) -> bool:
|
||||
@@ -283,13 +272,13 @@ class Analytics:
|
||||
"""Return the endpoint that will receive the payload."""
|
||||
if RELEASE_CHANNEL is ReleaseChannel.DEV:
|
||||
# dev installations will contact the dev analytics environment
|
||||
return BASIC_ENDPOINT_URL_DEV
|
||||
return BASIC_ENDPOINT_URL
|
||||
return ANALYTICS_ENDPOINT_URL_DEV
|
||||
return ANALYTICS_ENDPOINT_URL
|
||||
|
||||
@property
|
||||
def supervisor(self) -> bool:
|
||||
"""Return bool if a supervisor is present."""
|
||||
return is_hassio(self._hass)
|
||||
return is_hassio(self.hass)
|
||||
|
||||
async def load(self) -> None:
|
||||
"""Load preferences."""
|
||||
@@ -299,7 +288,7 @@ class Analytics:
|
||||
|
||||
if (
|
||||
self.supervisor
|
||||
and (supervisor_info := hassio.get_supervisor_info(self._hass)) is not None
|
||||
and (supervisor_info := hassio.get_supervisor_info(self.hass)) is not None
|
||||
):
|
||||
if not self.onboarded:
|
||||
# User have not configured analytics, get this setting from the supervisor
|
||||
@@ -326,7 +315,7 @@ class Analytics:
|
||||
|
||||
if self.supervisor:
|
||||
await hassio.async_update_diagnostics(
|
||||
self._hass, self.preferences.get(ATTR_DIAGNOSTICS, False)
|
||||
self.hass, self.preferences.get(ATTR_DIAGNOSTICS, False)
|
||||
)
|
||||
|
||||
async def send_analytics(self, _: datetime | None = None) -> None:
|
||||
@@ -334,7 +323,7 @@ class Analytics:
|
||||
if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
|
||||
return
|
||||
|
||||
hass = self._hass
|
||||
hass = self.hass
|
||||
supervisor_info = None
|
||||
operating_system_info: dict[str, Any] = {}
|
||||
|
||||
@@ -474,7 +463,7 @@ class Analytics:
|
||||
|
||||
try:
|
||||
async with timeout(30):
|
||||
response = await self._session.post(self.endpoint_basic, json=payload)
|
||||
response = await self.session.post(self.endpoint_basic, json=payload)
|
||||
if response.status == 200:
|
||||
LOGGER.info(
|
||||
(
|
||||
@@ -490,9 +479,11 @@ class Analytics:
|
||||
self.endpoint_basic,
|
||||
)
|
||||
except TimeoutError:
|
||||
LOGGER.error("Timeout sending analytics to %s", BASIC_ENDPOINT_URL)
|
||||
LOGGER.error("Timeout sending analytics to %s", ANALYTICS_ENDPOINT_URL)
|
||||
except aiohttp.ClientError as err:
|
||||
LOGGER.error("Error sending analytics to %s: %r", BASIC_ENDPOINT_URL, err)
|
||||
LOGGER.error(
|
||||
"Error sending analytics to %s: %r", ANALYTICS_ENDPOINT_URL, err
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_should_report_integration(
|
||||
@@ -516,7 +507,7 @@ class Analytics:
|
||||
if not integration.config_flow:
|
||||
return False
|
||||
|
||||
entries = self._hass.config_entries.async_entries(integration.domain)
|
||||
entries = self.hass.config_entries.async_entries(integration.domain)
|
||||
|
||||
# Filter out ignored and disabled entries
|
||||
return any(
|
||||
@@ -530,7 +521,7 @@ class Analytics:
|
||||
if not self.onboarded or not self.preferences.get(ATTR_SNAPSHOTS, False):
|
||||
return
|
||||
|
||||
payload = await _async_snapshot_payload(self._hass)
|
||||
payload = await _async_snapshot_payload(self.hass)
|
||||
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
@@ -541,16 +532,11 @@ class Analytics:
|
||||
self._data.submission_identifier
|
||||
)
|
||||
|
||||
url = (
|
||||
self._snapshots_url
|
||||
if self._snapshots_url is not None
|
||||
else SNAPSHOT_DEFAULT_URL
|
||||
)
|
||||
url += SNAPSHOT_URL_PATH
|
||||
|
||||
try:
|
||||
async with timeout(30):
|
||||
response = await self._session.post(url, json=payload, headers=headers)
|
||||
response = await self.session.post(
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL, json=payload, headers=headers
|
||||
)
|
||||
|
||||
if response.status == 200: # OK
|
||||
response_data = await response.json()
|
||||
@@ -576,7 +562,7 @@ class Analytics:
|
||||
# Clear the invalid identifier and retry on next cycle
|
||||
LOGGER.warning(
|
||||
"Invalid submission identifier to %s, clearing: %s",
|
||||
url,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
error_message,
|
||||
)
|
||||
self._data.submission_identifier = None
|
||||
@@ -585,7 +571,7 @@ class Analytics:
|
||||
LOGGER.warning(
|
||||
"Malformed snapshot analytics submission (%s) to %s: %s",
|
||||
error_kind,
|
||||
url,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
error_message,
|
||||
)
|
||||
|
||||
@@ -593,7 +579,7 @@ class Analytics:
|
||||
response_text = await response.text()
|
||||
LOGGER.warning(
|
||||
"Snapshot analytics service %s unavailable: %s",
|
||||
url,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
response_text,
|
||||
)
|
||||
|
||||
@@ -601,18 +587,18 @@ class Analytics:
|
||||
LOGGER.warning(
|
||||
"Unexpected status code %s when submitting snapshot analytics to %s",
|
||||
response.status,
|
||||
url,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
)
|
||||
|
||||
except TimeoutError:
|
||||
LOGGER.error(
|
||||
"Timeout sending snapshot analytics to %s",
|
||||
url,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
)
|
||||
except aiohttp.ClientError as err:
|
||||
LOGGER.error(
|
||||
"Error sending snapshot analytics to %s: %r",
|
||||
url,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
err,
|
||||
)
|
||||
|
||||
@@ -636,7 +622,7 @@ class Analytics:
|
||||
elif self._basic_scheduled is None:
|
||||
# Wait 15 min after started for basic analytics
|
||||
self._basic_scheduled = async_call_later(
|
||||
self._hass,
|
||||
self.hass,
|
||||
900,
|
||||
HassJob(
|
||||
self._async_schedule_basic,
|
||||
@@ -645,7 +631,10 @@ class Analytics:
|
||||
),
|
||||
)
|
||||
|
||||
if not self.preferences.get(ATTR_SNAPSHOTS, False) or self._disable_snapshots:
|
||||
if not self.preferences.get(ATTR_SNAPSHOTS, False) or RELEASE_CHANNEL not in (
|
||||
ReleaseChannel.DEV,
|
||||
ReleaseChannel.NIGHTLY,
|
||||
):
|
||||
LOGGER.debug("Snapshot analytics not scheduled")
|
||||
if self._snapshot_scheduled:
|
||||
self._snapshot_scheduled()
|
||||
@@ -653,11 +642,9 @@ class Analytics:
|
||||
elif self._snapshot_scheduled is None:
|
||||
snapshot_submission_time = self._data.snapshot_submission_time
|
||||
|
||||
interval_seconds = INTERVAL.total_seconds()
|
||||
|
||||
if snapshot_submission_time is None:
|
||||
# Randomize the submission time within the 24 hours
|
||||
snapshot_submission_time = random.uniform(0, interval_seconds)
|
||||
snapshot_submission_time = random.uniform(0, 86400)
|
||||
self._data.snapshot_submission_time = snapshot_submission_time
|
||||
await self._save()
|
||||
LOGGER.debug(
|
||||
@@ -667,10 +654,10 @@ class Analytics:
|
||||
|
||||
# Calculate delay until next submission
|
||||
current_time = time.time()
|
||||
delay = (snapshot_submission_time - current_time) % interval_seconds
|
||||
delay = (snapshot_submission_time - current_time) % 86400
|
||||
|
||||
self._snapshot_scheduled = async_call_later(
|
||||
self._hass,
|
||||
self.hass,
|
||||
delay,
|
||||
HassJob(
|
||||
self._async_schedule_snapshots,
|
||||
@@ -685,7 +672,7 @@ class Analytics:
|
||||
|
||||
# Send basic analytics every day
|
||||
self._basic_scheduled = async_track_time_interval(
|
||||
self._hass,
|
||||
self.hass,
|
||||
self.send_analytics,
|
||||
INTERVAL,
|
||||
name="basic analytics daily",
|
||||
@@ -698,7 +685,7 @@ class Analytics:
|
||||
|
||||
# Send snapshot analytics every day
|
||||
self._snapshot_scheduled = async_track_time_interval(
|
||||
self._hass,
|
||||
self.hass,
|
||||
self.send_snapshot,
|
||||
INTERVAL,
|
||||
name="snapshot analytics daily",
|
||||
|
||||
@@ -5,17 +5,15 @@ import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
ANALYTICS_ENDPOINT_URL = "https://analytics-api.home-assistant.io/v1"
|
||||
ANALYTICS_ENDPOINT_URL_DEV = "https://analytics-api-dev.home-assistant.io/v1"
|
||||
SNAPSHOT_VERSION = "1"
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL = f"https://device-database.eco-dev-aws.openhomefoundation.com/api/v1/snapshot/{SNAPSHOT_VERSION}"
|
||||
DOMAIN = "analytics"
|
||||
INTERVAL = timedelta(days=1)
|
||||
STORAGE_KEY = "core.analytics"
|
||||
STORAGE_VERSION = 1
|
||||
|
||||
BASIC_ENDPOINT_URL = "https://analytics-api.home-assistant.io/v1"
|
||||
BASIC_ENDPOINT_URL_DEV = "https://analytics-api-dev.home-assistant.io/v1"
|
||||
|
||||
SNAPSHOT_VERSION = 1
|
||||
SNAPSHOT_DEFAULT_URL = "https://device-database.eco-dev-aws.openhomefoundation.com"
|
||||
SNAPSHOT_URL_PATH = f"/api/v1/snapshot/{SNAPSHOT_VERSION}"
|
||||
|
||||
LOGGER: logging.Logger = logging.getLogger(__package__)
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/awair",
"iot_class": "local_polling",
"loggers": ["python_awair"],
"requirements": ["python-awair==0.2.5"],
"requirements": ["python-awair==0.2.4"],
"zeroconf": [
{
"name": "awair*",
@@ -24,7 +24,7 @@ class BrotherPrinterEntity(CoordinatorEntity[BrotherDataUpdateCoordinator]):
connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
serial_number=coordinator.brother.serial,
manufacturer="Brother",
model_id=coordinator.brother.model,
model=coordinator.brother.model,
name=coordinator.brother.model,
sw_version=coordinator.brother.firmware,
)

@@ -8,8 +8,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
"quality_scale": "platinum",
"requirements": ["brother==6.0.0"],
"requirements": ["brother==5.1.1"],
"zeroconf": [
{
"name": "brother*",
@@ -1,78 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: The integration does not register services.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: The integration does not register services.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: The integration does not register services.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: No options to configure.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: SNMP doesn't return error identifying an authentication problem, to change the SNMP community (simple password) the user should use reconfigure flow.
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: This integration has a fixed single device.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: This integration doesn't have any cases where raising an issue is needed.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: This integration has a fixed single device.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession:
|
||||
status: exempt
|
||||
comment: The integration does not connect via HTTP instead it uses a shared SNMP engine.
|
||||
strict-typing: done
|
||||
@@ -17,7 +17,7 @@ from homeassistant.components.sensor import (
SensorStateClass,
)
from homeassistant.const import PERCENTAGE, EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType

@@ -345,10 +345,12 @@ class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
"""Initialize."""
super().__init__(coordinator)

self._attr_native_value = description.value(coordinator.data)
self._attr_unique_id = f"{coordinator.brother.serial.lower()}_{description.key}"
self.entity_description = description

@property
def native_value(self) -> StateType | datetime:
"""Return the native value of the sensor."""
return self.entity_description.value(self.coordinator.data)
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self._attr_native_value = self.entity_description.value(self.coordinator.data)
self.async_write_ha_state()
@@ -7,7 +7,7 @@ from collections.abc import Awaitable, Callable
|
||||
from datetime import datetime, timedelta
|
||||
from enum import Enum
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
from typing import cast
|
||||
|
||||
from hass_nabucasa import Cloud
|
||||
import voluptuous as vol
|
||||
@@ -86,10 +86,6 @@ SIGNAL_CLOUD_CONNECTION_STATE: SignalType[CloudConnectionState] = SignalType(
|
||||
"CLOUD_CONNECTION_STATE"
|
||||
)
|
||||
|
||||
_SIGNAL_CLOUDHOOKS_UPDATED: SignalType[dict[str, Any]] = SignalType(
|
||||
"CLOUDHOOKS_UPDATED"
|
||||
)
|
||||
|
||||
STARTUP_REPAIR_DELAY = 1 # 1 hour
|
||||
|
||||
ALEXA_ENTITY_SCHEMA = vol.Schema(
|
||||
@@ -246,24 +242,6 @@ async def async_delete_cloudhook(hass: HomeAssistant, webhook_id: str) -> None:
|
||||
await hass.data[DATA_CLOUD].cloudhooks.async_delete(webhook_id)
|
||||
|
||||
|
||||
@callback
|
||||
def async_listen_cloudhook_change(
|
||||
hass: HomeAssistant,
|
||||
webhook_id: str,
|
||||
on_change: Callable[[dict[str, Any] | None], None],
|
||||
) -> Callable[[], None]:
|
||||
"""Listen for cloudhook changes for the given webhook and notify when modified or deleted."""
|
||||
|
||||
@callback
|
||||
def _handle_cloudhooks_updated(cloudhooks: dict[str, Any]) -> None:
|
||||
"""Handle cloudhooks updated signal."""
|
||||
on_change(cloudhooks.get(webhook_id))
|
||||
|
||||
return async_dispatcher_connect(
|
||||
hass, _SIGNAL_CLOUDHOOKS_UPDATED, _handle_cloudhooks_updated
|
||||
)
|
||||
|
||||
|
||||
@bind_hass
|
||||
@callback
|
||||
def async_remote_ui_url(hass: HomeAssistant) -> str:
|
||||
@@ -311,7 +289,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)
|
||||
|
||||
_handle_prefs_updated(hass, cloud)
|
||||
_remote_handle_prefs_updated(cloud)
|
||||
_setup_services(hass, prefs)
|
||||
|
||||
async def async_startup_repairs(_: datetime) -> None:
|
||||
@@ -395,32 +373,26 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
|
||||
@callback
|
||||
def _handle_prefs_updated(hass: HomeAssistant, cloud: Cloud[CloudClient]) -> None:
|
||||
"""Register handler for cloud preferences updates."""
|
||||
cur_remote_enabled = cloud.client.prefs.remote_enabled
|
||||
cur_cloudhooks = cloud.client.prefs.cloudhooks
|
||||
def _remote_handle_prefs_updated(cloud: Cloud[CloudClient]) -> None:
|
||||
"""Handle remote preferences updated."""
|
||||
cur_pref = cloud.client.prefs.remote_enabled
|
||||
lock = asyncio.Lock()
|
||||
|
||||
async def on_prefs_updated(prefs: CloudPreferences) -> None:
|
||||
"""Handle cloud preferences updates."""
|
||||
nonlocal cur_remote_enabled
|
||||
nonlocal cur_cloudhooks
|
||||
# Sync remote connection with prefs
|
||||
async def remote_prefs_updated(prefs: CloudPreferences) -> None:
|
||||
"""Update remote status."""
|
||||
nonlocal cur_pref
|
||||
|
||||
# Lock protects cur_ state variables from concurrent updates
|
||||
async with lock:
|
||||
if cur_cloudhooks != prefs.cloudhooks:
|
||||
cur_cloudhooks = prefs.cloudhooks
|
||||
async_dispatcher_send(hass, _SIGNAL_CLOUDHOOKS_UPDATED, cur_cloudhooks)
|
||||
|
||||
if prefs.remote_enabled == cur_remote_enabled:
|
||||
if prefs.remote_enabled == cur_pref:
|
||||
return
|
||||
|
||||
if cur_remote_enabled := prefs.remote_enabled:
|
||||
if cur_pref := prefs.remote_enabled:
|
||||
await cloud.remote.connect()
|
||||
else:
|
||||
await cloud.remote.disconnect()
|
||||
|
||||
cloud.client.prefs.async_listen_updates(on_prefs_updated)
|
||||
cloud.client.prefs.async_listen_updates(remote_prefs_updated)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
@@ -18,7 +18,6 @@ def async_setup(hass: HomeAssistant) -> bool:
websocket_api.async_register_command(hass, websocket_create_area)
websocket_api.async_register_command(hass, websocket_delete_area)
websocket_api.async_register_command(hass, websocket_update_area)
websocket_api.async_register_command(hass, websocket_reorder_areas)
return True


@@ -146,27 +145,3 @@ def websocket_update_area(
connection.send_error(msg["id"], "invalid_info", str(err))
else:
connection.send_result(msg["id"], entry.json_fragment)


@websocket_api.websocket_command(
{
vol.Required("type"): "config/area_registry/reorder",
vol.Required("area_ids"): [str],
}
)
@websocket_api.require_admin
@callback
def websocket_reorder_areas(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Handle reorder areas websocket command."""
registry = ar.async_get(hass)

try:
registry.async_reorder(msg["area_ids"])
except ValueError as err:
connection.send_error(msg["id"], websocket_api.ERR_INVALID_FORMAT, str(err))
else:
connection.send_result(msg["id"])

@@ -18,7 +18,6 @@ def async_setup(hass: HomeAssistant) -> bool:
websocket_api.async_register_command(hass, websocket_create_floor)
websocket_api.async_register_command(hass, websocket_delete_floor)
websocket_api.async_register_command(hass, websocket_update_floor)
websocket_api.async_register_command(hass, websocket_reorder_floors)
return True


@@ -128,28 +127,6 @@ def websocket_update_floor(
connection.send_result(msg["id"], _entry_dict(entry))


@websocket_api.websocket_command(
{
vol.Required("type"): "config/floor_registry/reorder",
vol.Required("floor_ids"): [str],
}
)
@websocket_api.require_admin
@callback
def websocket_reorder_floors(
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
"""Handle reorder floors websocket command."""
registry = fr.async_get(hass)

try:
registry.async_reorder(msg["floor_ids"])
except ValueError as err:
connection.send_error(msg["id"], websocket_api.ERR_INVALID_FORMAT, str(err))
else:
connection.send_result(msg["id"])


@callback
def _entry_dict(entry: FloorEntry) -> dict[str, Any]:
"""Convert entry to API format."""

@@ -2,7 +2,6 @@

from __future__ import annotations

from datetime import timedelta
import logging
from typing import Any

@@ -26,7 +25,6 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import Throttle

_LOGGER = logging.getLogger(__name__)

@@ -169,7 +167,6 @@ class DecoraWifiLight(LightEntity):
except ValueError:
_LOGGER.error("Failed to turn off myLeviton switch")

@Throttle(timedelta(seconds=30))
def update(self) -> None:
"""Fetch new state data for this switch."""
try:

@@ -5,10 +5,5 @@
"default": "mdi:chart-line"
}
}
},
"services": {
"reload": {
"service": "mdi:reload"
}
}
}

@@ -20,10 +20,8 @@ from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
CONF_NAME,
CONF_SOURCE,
CONF_UNIQUE_ID,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
Platform,
UnitOfTime,
)
from homeassistant.core import (
@@ -46,7 +44,6 @@ from homeassistant.helpers.event import (
async_track_state_change_event,
async_track_state_report_event,
)
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .const import (
@@ -56,7 +53,6 @@ from .const import (
CONF_UNIT,
CONF_UNIT_PREFIX,
CONF_UNIT_TIME,
DOMAIN,
)

_LOGGER = logging.getLogger(__name__)
@@ -89,7 +85,6 @@ DEFAULT_TIME_WINDOW = 0
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_UNIQUE_ID): cv.string,
vol.Required(CONF_SOURCE): cv.entity_id,
vol.Optional(CONF_ROUND_DIGITS, default=DEFAULT_ROUND): vol.Coerce(int),
vol.Optional(CONF_UNIT_PREFIX, default=None): vol.In(UNIT_PREFIXES),
@@ -150,8 +145,6 @@ async def async_setup_platform(
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the derivative sensor."""
await async_setup_reload_service(hass, DOMAIN, [Platform.SENSOR])

derivative = DerivativeSensor(
hass,
name=config.get(CONF_NAME),
@@ -161,7 +154,7 @@ async def async_setup_platform(
unit_of_measurement=config.get(CONF_UNIT),
unit_prefix=config[CONF_UNIT_PREFIX],
unit_time=config[CONF_UNIT_TIME],
unique_id=config.get(CONF_UNIQUE_ID),
unique_id=None,
max_sub_interval=config.get(CONF_MAX_SUB_INTERVAL),
)

@@ -293,14 +286,14 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
)
self.async_write_ha_state()

async def _handle_restore(self) -> None:
async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""
await super().async_added_to_hass()
restored_data = await self.async_get_last_sensor_data()
if restored_data:
if self._attr_native_unit_of_measurement is None:
# Only restore the unit if it's not assigned from YAML
self._attr_native_unit_of_measurement = (
restored_data.native_unit_of_measurement
)
self._attr_native_unit_of_measurement = (
restored_data.native_unit_of_measurement
)
try:
self._attr_native_value = round(
Decimal(restored_data.native_value), # type: ignore[arg-type]
@@ -309,11 +302,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
except (InvalidOperation, TypeError):
self._attr_native_value = None

async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""
await super().async_added_to_hass()
await self._handle_restore()

source_state = self.hass.states.get(self._sensor_source_id)
self._derive_and_set_attributes_from_state(source_state)

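The restore path above round-trips the previously stored native value through Decimal and silently falls back to None on junk input. A minimal standalone sketch of that guard, with illustrative sample values rather than real restored states:

from decimal import Decimal, InvalidOperation


def restore_native_value(stored, round_digits: int = 3) -> Decimal | None:
    # Stored state may be a number, a numeric string, None, or text such as "unknown".
    try:
        return round(Decimal(stored), round_digits)
    except (InvalidOperation, TypeError):
        return None


print(restore_native_value("3.14159"))  # Decimal('3.142')
print(restore_native_value(None))       # None
print(restore_native_value("unknown"))  # None
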
@@ -1 +0,0 @@
reload:
@@ -58,11 +58,5 @@
}
}
},
"services": {
"reload": {
"description": "Reloads derivative sensors from the YAML-configuration.",
"name": "[%key:common::action::reload%]"
}
},
"title": "Derivative sensor"
}

259 homeassistant/components/dominos/__init__.py Normal file
@@ -0,0 +1,259 @@
"""Support for Dominos Pizza ordering."""

from datetime import timedelta
import logging

from pizzapi import Address, Customer, Order
import voluptuous as vol

from homeassistant.components import http
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import Throttle

_LOGGER = logging.getLogger(__name__)

# The domain of your component. Should be equal to the name of your component.
DOMAIN = "dominos"
ENTITY_ID_FORMAT = DOMAIN + ".{}"

ATTR_COUNTRY = "country_code"
ATTR_FIRST_NAME = "first_name"
ATTR_LAST_NAME = "last_name"
ATTR_EMAIL = "email"
ATTR_PHONE = "phone"
ATTR_ADDRESS = "address"
ATTR_ORDERS = "orders"
ATTR_SHOW_MENU = "show_menu"
ATTR_ORDER_ENTITY = "order_entity_id"
ATTR_ORDER_NAME = "name"
ATTR_ORDER_CODES = "codes"

MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10)
MIN_TIME_BETWEEN_STORE_UPDATES = timedelta(minutes=3330)

_ORDERS_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ORDER_NAME): cv.string,
vol.Required(ATTR_ORDER_CODES): vol.All(cv.ensure_list, [cv.string]),
}
)

CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(ATTR_COUNTRY): cv.string,
vol.Required(ATTR_FIRST_NAME): cv.string,
vol.Required(ATTR_LAST_NAME): cv.string,
vol.Required(ATTR_EMAIL): cv.string,
vol.Required(ATTR_PHONE): cv.string,
vol.Required(ATTR_ADDRESS): cv.string,
vol.Optional(ATTR_SHOW_MENU): cv.boolean,
vol.Optional(ATTR_ORDERS, default=[]): vol.All(
cv.ensure_list, [_ORDERS_SCHEMA]
),
}
)
},
extra=vol.ALLOW_EXTRA,
)


def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up is called when Home Assistant is loading our component."""
dominos = Dominos(hass, config)

component = EntityComponent[DominosOrder](_LOGGER, DOMAIN, hass)
hass.data[DOMAIN] = {}
entities: list[DominosOrder] = []
conf = config[DOMAIN]

hass.services.register(
DOMAIN,
"order",
dominos.handle_order,
vol.Schema(
{
vol.Required(ATTR_ORDER_ENTITY): cv.entity_ids,
}
),
)

if conf.get(ATTR_SHOW_MENU):
hass.http.register_view(DominosProductListView(dominos))

for order_info in conf.get(ATTR_ORDERS):
order = DominosOrder(order_info, dominos)
entities.append(order)

component.add_entities(entities)

# Return boolean to indicate that initialization was successful.
return True


class Dominos:
"""Main Dominos service."""

def __init__(self, hass, config):
"""Set up main service."""
conf = config[DOMAIN]

self.hass = hass
self.customer = Customer(
conf.get(ATTR_FIRST_NAME),
conf.get(ATTR_LAST_NAME),
conf.get(ATTR_EMAIL),
conf.get(ATTR_PHONE),
conf.get(ATTR_ADDRESS),
)
self.address = Address(
*self.customer.address.split(","), country=conf.get(ATTR_COUNTRY)
)
self.country = conf.get(ATTR_COUNTRY)
try:
self.closest_store = self.address.closest_store()
except Exception: # noqa: BLE001
self.closest_store = None

def handle_order(self, call: ServiceCall) -> None:
"""Handle ordering pizza."""
entity_ids = call.data[ATTR_ORDER_ENTITY]

target_orders = [
order
for order in self.hass.data[DOMAIN]["entities"]
if order.entity_id in entity_ids
]

for order in target_orders:
order.place()

@Throttle(MIN_TIME_BETWEEN_STORE_UPDATES)
def update_closest_store(self):
"""Update the shared closest store (if open)."""
try:
self.closest_store = self.address.closest_store()
except Exception: # noqa: BLE001
self.closest_store = None
return False
return True

def get_menu(self):
"""Return the products from the closest stores menu."""
self.update_closest_store()
if self.closest_store is None:
_LOGGER.warning("Cannot get menu. Store may be closed")
return []
menu = self.closest_store.get_menu()
product_entries = []

for product in menu.products:
item = {}
if isinstance(product.menu_data["Variants"], list):
variants = ", ".join(product.menu_data["Variants"])
else:
variants = product.menu_data["Variants"]
item["name"] = product.name
item["variants"] = variants
product_entries.append(item)

return product_entries


class DominosProductListView(http.HomeAssistantView):
"""View to retrieve product list content."""

url = "/api/dominos"
name = "api:dominos"

def __init__(self, dominos):
"""Initialize suite view."""
self.dominos = dominos

@callback
def get(self, request):
"""Retrieve if API is running."""
return self.json(self.dominos.get_menu())


class DominosOrder(Entity):
"""Represents a Dominos order entity."""

def __init__(self, order_info, dominos):
"""Set up the entity."""
self._name = order_info["name"]
self._product_codes = order_info["codes"]
self._orderable = False
self.dominos = dominos

@property
def name(self):
"""Return the orders name."""
return self._name

@property
def product_codes(self):
"""Return the orders product codes."""
return self._product_codes

@property
def orderable(self):
"""Return true if orderable."""
return self._orderable

@property
def state(self):
"""Return the state either closed, orderable or unorderable."""
if self.dominos.closest_store is None:
return "closed"
return "orderable" if self._orderable else "unorderable"

@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Update the order state and refresh the store."""
try:
self.dominos.update_closest_store()
except Exception: # noqa: BLE001
self._orderable = False
return

try:
order = self.order()
order.pay_with()
self._orderable = True
except Exception: # noqa: BLE001
self._orderable = False

def order(self):
"""Create the order object."""
if self.dominos.closest_store is None:
raise HomeAssistantError("No store available")

order = Order(
self.dominos.closest_store,
self.dominos.customer,
self.dominos.address,
self.dominos.country,
)

for code in self._product_codes:
order.add_item(code)

return order

def place(self):
"""Place the order."""
try:
order = self.order()
order.place()
except Exception: # noqa: BLE001
self._orderable = False
_LOGGER.warning(
"Attempted to order Dominos - Order invalid or store closed"
)
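The module above leans on the @Throttle decorator to rate-limit update() and update_closest_store(). As a rough illustration of the idea only (Home Assistant's real implementation also handles locking and a no_throttle override), a minimal throttling decorator could look like this:

from datetime import datetime, timedelta
from functools import wraps


def throttle(interval: timedelta):
    """Skip calls that arrive within `interval` of the last successful call (sketch)."""

    def decorator(func):
        last_called: list[datetime | None] = [None]

        @wraps(func)
        def wrapper(*args, **kwargs):
            now = datetime.now()
            if last_called[0] is not None and now - last_called[0] < interval:
                return None  # throttled
            last_called[0] = now
            return func(*args, **kwargs)

        return wrapper

    return decorator


@throttle(timedelta(minutes=10))
def update_closest_store() -> str:
    return "store refreshed"


print(update_closest_store())  # "store refreshed"
print(update_closest_store())  # None (second call within 10 minutes is skipped)
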
7 homeassistant/components/dominos/icons.json Normal file
@@ -0,0 +1,7 @@
{
"services": {
"order": {
"service": "mdi:pizza"
}
}
}
11 homeassistant/components/dominos/manifest.json Normal file
@@ -0,0 +1,11 @@
{
"domain": "dominos",
"name": "Dominos Pizza",
"codeowners": [],
"dependencies": ["http"],
"documentation": "https://www.home-assistant.io/integrations/dominos",
"iot_class": "cloud_polling",
"loggers": ["pizzapi"],
"quality_scale": "legacy",
"requirements": ["pizzapi==0.0.6"]
}
6 homeassistant/components/dominos/services.yaml Normal file
@@ -0,0 +1,6 @@
order:
fields:
order_entity_id:
example: dominos.medium_pan
selector:
text:
14 homeassistant/components/dominos/strings.json Normal file
@@ -0,0 +1,14 @@
{
"services": {
"order": {
"description": "Places a set of orders with Domino's Pizza.",
"fields": {
"order_entity_id": {
"description": "The ID (as specified in the configuration) of an order to place. If provided as an array, all the identified orders will be placed.",
"name": "Order entity"
}
},
"name": "Order"
}
}
}
@@ -29,9 +29,9 @@ from homeassistant.const import (
UnitOfVolumeFlowRate,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import template
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util import dt as dt_util

from .config_flow import sensor_name
from .const import CONF_ONLY_INCLUDE_FEEDID, FEED_ID, FEED_NAME, FEED_TAG
@@ -267,9 +267,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity):
self._attr_extra_state_attributes[ATTR_USERID] = elem["userid"]
self._attr_extra_state_attributes[ATTR_LASTUPDATETIME] = elem["time"]
self._attr_extra_state_attributes[ATTR_LASTUPDATETIMESTR] = (
dt_util.as_local(
dt_util.utc_from_timestamp(float(elem["time"]))
).isoformat()
template.timestamp_local(float(elem["time"]))
)

self._attr_native_value = None

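Both sides of the emoncms hunk above produce a local-time ISO 8601 string from the feed's Unix timestamp. A stdlib-only sketch of the same conversion, with an arbitrary example timestamp:

from datetime import datetime, timezone


def timestamp_to_local_iso(epoch: float) -> str:
    """Convert a Unix timestamp to an ISO 8601 string in the machine's local timezone."""
    return datetime.fromtimestamp(epoch, tz=timezone.utc).astimezone().isoformat()


print(timestamp_to_local_iso(1_700_000_000))  # e.g. '2023-11-14T22:13:20+00:00', depending on the local timezone
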
@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20251105.1"]
"requirements": ["home-assistant-frontend==20251105.0"]
}

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/generic",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["av==16.0.1", "Pillow==12.0.0"]
"requirements": ["av==13.1.0", "Pillow==12.0.0"]
}

@@ -2,12 +2,10 @@

from __future__ import annotations

from dataclasses import dataclass
import logging
from secrets import token_hex
import shutil

from aiohttp import BasicAuth, ClientSession, UnixConnector
from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError
from awesomeversion import AwesomeVersion
from go2rtc_client import Go2RtcRestClient
@@ -37,12 +35,7 @@ from homeassistant.components.camera import (
from homeassistant.components.default_config import DOMAIN as DEFAULT_CONFIG_DOMAIN
from homeassistant.components.stream import Orientation
from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry
from homeassistant.const import (
CONF_PASSWORD,
CONF_URL,
CONF_USERNAME,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
from homeassistant.helpers import (
@@ -50,10 +43,7 @@ from homeassistant.helpers import (
discovery_flow,
issue_registry as ir,
)
from homeassistant.helpers.aiohttp_client import (
async_create_clientsession,
async_get_clientsession,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.package import is_docker_env
@@ -62,7 +52,6 @@ from .const import (
CONF_DEBUG_UI,
DEBUG_UI_URL_MESSAGE,
DOMAIN,
HA_MANAGED_UNIX_SOCKET,
HA_MANAGED_URL,
RECOMMENDED_VERSION,
)
@@ -71,49 +60,49 @@ from .server import Server
_LOGGER = logging.getLogger(__name__)

_FFMPEG = "ffmpeg"
_AUTH = "auth"


def _validate_auth(config: dict) -> dict:
"""Validate that username and password are only set when a URL is configured or when debug UI is enabled."""
auth_exists = CONF_USERNAME in config
debug_ui_enabled = config.get(CONF_DEBUG_UI, False)

if debug_ui_enabled and not auth_exists:
raise vol.Invalid("Username and password must be set when debug_ui is true")

if auth_exists and CONF_URL not in config and not debug_ui_enabled:
raise vol.Invalid(
"Username and password can only be set when a URL is configured or debug_ui is true"
)

return config

_SUPPORTED_STREAMS = frozenset(
(
"bubble",
"dvrip",
"expr",
_FFMPEG,
"gopro",
"homekit",
"http",
"https",
"httpx",
"isapi",
"ivideon",
"kasa",
"nest",
"onvif",
"roborock",
"rtmp",
"rtmps",
"rtmpx",
"rtsp",
"rtsps",
"rtspx",
"tapo",
"tcp",
"webrtc",
"webtorrent",
)
)

CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
vol.Schema(
{
vol.Exclusive(CONF_URL, DOMAIN, DEBUG_UI_URL_MESSAGE): cv.url,
vol.Exclusive(
CONF_DEBUG_UI, DOMAIN, DEBUG_UI_URL_MESSAGE
): cv.boolean,
vol.Inclusive(CONF_USERNAME, _AUTH): vol.All(
cv.string, vol.Length(min=1)
),
vol.Inclusive(CONF_PASSWORD, _AUTH): vol.All(
cv.string, vol.Length(min=1)
),
}
),
_validate_auth,
DOMAIN: vol.Schema(
{
vol.Exclusive(CONF_URL, DOMAIN, DEBUG_UI_URL_MESSAGE): cv.url,
vol.Exclusive(CONF_DEBUG_UI, DOMAIN, DEBUG_UI_URL_MESSAGE): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)

_DATA_GO2RTC: HassKey[Go2RtcConfig] = HassKey(DOMAIN)
_DATA_GO2RTC: HassKey[str] = HassKey(DOMAIN)
_RETRYABLE_ERRORS = (ClientConnectionError, ServerConnectionError)
type Go2RtcConfigEntry = ConfigEntry[WebRTCProvider]

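The CONFIG_SCHEMA variant above chains a plain vol.Schema with the _validate_auth callable via vol.All, so cross-field rules are enforced after per-key validation. A self-contained sketch of that pattern with hypothetical keys (plain str in place of the cv helpers):

import voluptuous as vol

CONF_URL, CONF_USERNAME, CONF_PASSWORD = "url", "username", "password"


def _require_url_with_auth(config: dict) -> dict:
    # Cross-field rule: credentials only make sense together with a configured URL.
    if CONF_USERNAME in config and CONF_URL not in config:
        raise vol.Invalid("username/password require a configured url")
    return config


SCHEMA = vol.All(
    vol.Schema(
        {
            vol.Optional(CONF_URL): str,
            vol.Inclusive(CONF_USERNAME, "auth"): str,
            vol.Inclusive(CONF_PASSWORD, "auth"): str,
        }
    ),
    _require_url_with_auth,
)

SCHEMA({"url": "http://localhost:1984", "username": "u", "password": "p"})  # passes
try:
    SCHEMA({"username": "u", "password": "p"})  # fails the cross-field check
except vol.Invalid as err:
    print(err)
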
@@ -121,19 +110,12 @@ type Go2RtcConfigEntry = ConfigEntry[WebRTCProvider]
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up WebRTC."""
url: str | None = None
username: str | None = None
password: str | None = None

if DOMAIN not in config and DEFAULT_CONFIG_DOMAIN not in config:
await _remove_go2rtc_entries(hass)
return True

domain_config = config.get(DOMAIN, {})
username = domain_config.get(CONF_USERNAME)
password = domain_config.get(CONF_PASSWORD)

if not (configured_by_user := DOMAIN in config) or not (
url := domain_config.get(CONF_URL)
url := config[DOMAIN].get(CONF_URL)
):
if not is_docker_env():
if not configured_by_user:
@@ -146,26 +128,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
_LOGGER.error("Could not find go2rtc docker binary")
return False

# Generate random credentials when not provided to secure the server
if not username or not password:
username = token_hex()
password = token_hex()
_LOGGER.debug("Generated random credentials for go2rtc server")

auth = BasicAuth(username, password)
# HA will manage the binary
# Manually created session (not using the helper) needs to be closed manually
# See on_stop listener below
session = ClientSession(
connector=UnixConnector(path=HA_MANAGED_UNIX_SOCKET), auth=auth
)
server = Server(
hass,
binary,
session,
enable_ui=domain_config.get(CONF_DEBUG_UI, False),
username=username,
password=password,
hass, binary, enable_ui=config.get(DOMAIN, {}).get(CONF_DEBUG_UI, False)
)
try:
await server.start()
@@ -175,19 +140,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

async def on_stop(event: Event) -> None:
await server.stop()
await session.close()

hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, on_stop)

url = HA_MANAGED_URL
elif username and password:
# Create session with BasicAuth if credentials are provided
auth = BasicAuth(username, password)
session = async_create_clientsession(hass, auth=auth)
else:
session = async_get_clientsession(hass)

hass.data[_DATA_GO2RTC] = Go2RtcConfig(url, session)
hass.data[_DATA_GO2RTC] = url
discovery_flow.async_create_flow(
hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={}
)
@@ -203,9 +161,8 @@ async def _remove_go2rtc_entries(hass: HomeAssistant) -> None:
async def async_setup_entry(hass: HomeAssistant, entry: Go2RtcConfigEntry) -> bool:
"""Set up go2rtc from a config entry."""

config = hass.data[_DATA_GO2RTC]
url = config.url
session = config.session
url = hass.data[_DATA_GO2RTC]
session = async_get_clientsession(hass)
client = Go2RtcRestClient(session, url)
# Validate the server URL
try:
@@ -240,7 +197,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: Go2RtcConfigEntry) -> bo
return False

provider = entry.runtime_data = WebRTCProvider(hass, url, session, client)
await provider.initialize()
entry.async_on_unload(async_register_webrtc_provider(hass, provider))
return True

@@ -272,21 +228,16 @@ class WebRTCProvider(CameraWebRTCProvider):
self._session = session
self._rest_client = rest_client
self._sessions: dict[str, Go2RtcWsClient] = {}
self._supported_schemes: set[str] = set()

@property
def domain(self) -> str:
"""Return the integration domain of the provider."""
return DOMAIN

async def initialize(self) -> None:
"""Initialize the provider."""
self._supported_schemes = await self._rest_client.schemes.list()

@callback
def async_is_supported(self, stream_source: str) -> bool:
"""Return if this provider supports the Camera as source."""
return stream_source.partition(":")[0] in self._supported_schemes
return stream_source.partition(":")[0] in _SUPPORTED_STREAMS

async def async_handle_async_webrtc_offer(
self,
@@ -414,11 +365,3 @@ class WebRTCProvider(CameraWebRTCProvider):
for ws_client in self._sessions.values():
await ws_client.close()
self._sessions.clear()


@dataclass
class Go2RtcConfig:
"""Go2rtc configuration."""

url: str
session: ClientSession

@@ -6,5 +6,4 @@ CONF_DEBUG_UI = "debug_ui"
DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time."
HA_MANAGED_API_PORT = 11984
HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
HA_MANAGED_UNIX_SOCKET = "/run/go2rtc.sock"
RECOMMENDED_VERSION = "1.9.12"
RECOMMENDED_VERSION = "1.9.11"

@@ -8,6 +8,6 @@
"integration_type": "system",
"iot_class": "local_polling",
"quality_scale": "internal",
"requirements": ["go2rtc-client==0.3.0"],
"requirements": ["go2rtc-client==0.2.1"],
"single_config_entry": true
}

@@ -6,13 +6,13 @@ from contextlib import suppress
import logging
from tempfile import NamedTemporaryFile

from aiohttp import ClientSession
from go2rtc_client import Go2RtcRestClient

from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import HA_MANAGED_API_PORT, HA_MANAGED_UNIX_SOCKET, HA_MANAGED_URL
from .const import HA_MANAGED_API_PORT, HA_MANAGED_URL

_LOGGER = logging.getLogger(__name__)
_TERMINATE_TIMEOUT = 5
@@ -23,30 +23,14 @@ _LOG_BUFFER_SIZE = 512
_RESPAWN_COOLDOWN = 1

# Default configuration for HA
# - Unix socket for secure local communication
# - Basic auth enabled, including local connections
# - HTTP API only enabled when UI is enabled
# - Api is listening only on localhost
# - Enable rtsp for localhost only as ffmpeg needs it
# - Clear default ice servers
_GO2RTC_CONFIG_FORMAT = r"""# This file is managed by Home Assistant
# Do not edit it manually

app:
modules: {app_modules}

api:
listen: "{listen_config}"
unix_listen: "{unix_socket}"
allow_paths: {api_allow_paths}
local_auth: true
username: {username}
password: {password}

# ffmpeg needs the exec module
# Restrict execution to only ffmpeg binary
exec:
allow_paths:
- ffmpeg
listen: "{api_ip}:{api_port}"

rtsp:
listen: "127.0.0.1:18554"
@@ -56,43 +40,6 @@ webrtc:
ice_servers: []
"""

_APP_MODULES = (
"api",
"exec", # Execution module for ffmpeg
"ffmpeg",
"http",
"mjpeg",
"onvif",
"rtmp",
"rtsp",
"srtp",
"webrtc",
"ws",
)

_API_ALLOW_PATHS = (
"/", # UI static page and version control
"/api", # Main API path
"/api/frame.jpeg", # Snapshot functionality
"/api/schemes", # Supported stream schemes
"/api/streams", # Stream management
"/api/webrtc", # Webrtc functionality
"/api/ws", # Websocket functionality (e.g. webrtc candidates)
)

# Additional modules when UI is enabled
_UI_APP_MODULES = (
*_APP_MODULES,
"debug",
)
# Additional api paths when UI is enabled
_UI_API_ALLOW_PATHS = (
*_API_ALLOW_PATHS,
"/api/config", # UI config view
"/api/log", # UI log view
"/api/streams.dot", # UI network view
)

_LOG_LEVEL_MAP = {
"TRC": logging.DEBUG,
"DBG": logging.DEBUG,
@@ -114,40 +61,14 @@ class Go2RTCWatchdogError(HomeAssistantError):
"""Raised on watchdog error."""


def _format_list_for_yaml(items: tuple[str, ...]) -> str:
"""Format a list of strings for yaml config."""
if not items:
return "[]"
formatted_items = ",".join(f'"{item}"' for item in items)
return f"[{formatted_items}]"


def _create_temp_file(enable_ui: bool, username: str, password: str) -> str:
def _create_temp_file(api_ip: str) -> str:
"""Create temporary config file."""
app_modules: tuple[str, ...] = _APP_MODULES
api_paths: tuple[str, ...] = _API_ALLOW_PATHS

if enable_ui:
app_modules = _UI_APP_MODULES
api_paths = _UI_API_ALLOW_PATHS
# Listen on all interfaces for allowing access from all ips
listen_config = f":{HA_MANAGED_API_PORT}"
else:
# Disable HTTP listening when UI is not enabled
# as HA does not use it.
listen_config = ""

# Set delete=False to prevent the file from being deleted when the file is closed
# Linux is clearing tmp folder on reboot, so no need to delete it manually
with NamedTemporaryFile(prefix="go2rtc_", suffix=".yaml", delete=False) as file:
file.write(
_GO2RTC_CONFIG_FORMAT.format(
listen_config=listen_config,
unix_socket=HA_MANAGED_UNIX_SOCKET,
app_modules=_format_list_for_yaml(app_modules),
api_allow_paths=_format_list_for_yaml(api_paths),
username=username,
password=password,
api_ip=api_ip, api_port=HA_MANAGED_API_PORT
).encode()
)
return file.name
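_create_temp_file above fills a str.format template, using _format_list_for_yaml to inline tuples as YAML lists. A trimmed-down sketch of that rendering step; the template and paths here are illustrative, not the integration's real configuration:

_CONFIG_TEMPLATE = """api:
  listen: "{listen}"
  allow_paths: {allow_paths}
"""


def format_list_for_yaml(items: tuple[str, ...]) -> str:
    """Render a tuple of strings as an inline YAML list, e.g. ["a","b"]."""
    if not items:
        return "[]"
    return "[" + ",".join(f'"{item}"' for item in items) + "]"


def render_config(enable_ui: bool) -> str:
    paths = ("/api", "/api/ws") + (("/api/log",) if enable_ui else ())
    listen = ":11984" if enable_ui else ""  # an empty string disables the HTTP listener
    return _CONFIG_TEMPLATE.format(listen=listen, allow_paths=format_list_for_yaml(paths))


print(render_config(enable_ui=False))
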
@@ -157,25 +78,18 @@ class Server:
"""Go2rtc server."""

def __init__(
self,
hass: HomeAssistant,
binary: str,
session: ClientSession,
*,
enable_ui: bool = False,
username: str,
password: str,
self, hass: HomeAssistant, binary: str, *, enable_ui: bool = False
) -> None:
"""Initialize the server."""
self._hass = hass
self._binary = binary
self._session = session
self._enable_ui = enable_ui
self._username = username
self._password = password
self._log_buffer: deque[str] = deque(maxlen=_LOG_BUFFER_SIZE)
self._process: asyncio.subprocess.Process | None = None
self._startup_complete = asyncio.Event()
self._api_ip = _LOCALHOST_IP
if enable_ui:
# Listen on all interfaces for allowing access from all ips
self._api_ip = ""
self._watchdog_task: asyncio.Task | None = None
self._watchdog_tasks: list[asyncio.Task] = []

@@ -190,7 +104,7 @@ class Server:
"""Start the server."""
_LOGGER.debug("Starting go2rtc server")
config_file = await self._hass.async_add_executor_job(
_create_temp_file, self._enable_ui, self._username, self._password
_create_temp_file, self._api_ip
)

self._startup_complete.clear()
@@ -219,7 +133,7 @@ class Server:
raise Go2RTCServerStartError from err

# Check the server version
client = Go2RtcRestClient(self._session, HA_MANAGED_URL)
client = Go2RtcRestClient(async_get_clientsession(self._hass), HA_MANAGED_URL)
await client.validate_server_version()

async def _log_output(self, process: asyncio.subprocess.Process) -> None:
@@ -291,7 +205,7 @@ class Server:

async def _monitor_api(self) -> None:
"""Raise if the go2rtc process terminates."""
client = Go2RtcRestClient(self._session, HA_MANAGED_URL)
client = Go2RtcRestClient(async_get_clientsession(self._hass), HA_MANAGED_URL)

_LOGGER.debug("Monitoring go2rtc API")
try:

@@ -97,8 +97,7 @@ SENSOR_DESCRIPTIONS = [
key="duration",
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.SECONDS,
suggested_unit_of_measurement=UnitOfTime.MINUTES,
native_unit_of_measurement=UnitOfTime.MINUTES,
)
]

@@ -175,7 +174,7 @@ class GoogleTravelTimeSensor(SensorEntity):
if self._route is None:
return None

return self._route.duration.seconds
return round(self._route.duration.seconds / 60)

@property
def extra_state_attributes(self) -> dict[str, Any] | None:

@@ -1,54 +0,0 @@
"""The Hanna Instruments integration."""

from __future__ import annotations

from typing import Any

from hanna_cloud import HannaCloudClient

from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant

from .coordinator import HannaConfigEntry, HannaDataCoordinator

PLATFORMS = [Platform.SENSOR]


def _authenticate_and_get_devices(
api_client: HannaCloudClient,
email: str,
password: str,
) -> list[dict[str, Any]]:
"""Authenticate and get devices in a single executor job."""
api_client.authenticate(email, password)
return api_client.get_devices()


async def async_setup_entry(hass: HomeAssistant, entry: HannaConfigEntry) -> bool:
"""Set up Hanna Instruments from a config entry."""
api_client = HannaCloudClient()
devices = await hass.async_add_executor_job(
_authenticate_and_get_devices,
api_client,
entry.data[CONF_EMAIL],
entry.data[CONF_PASSWORD],
)

# Create device coordinators
device_coordinators = {}
for device in devices:
coordinator = HannaDataCoordinator(hass, entry, device, api_client)
await coordinator.async_config_entry_first_refresh()
device_coordinators[coordinator.device_identifier] = coordinator

# Set runtime data
entry.runtime_data = device_coordinators

# Forward the setup to the platforms
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True


async def async_unload_entry(hass: HomeAssistant, entry: HannaConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -1,62 +0,0 @@
"""Config flow for Hanna Instruments integration."""

from __future__ import annotations

import logging
from typing import Any

from hanna_cloud import AuthenticationError, HannaCloudClient
from requests.exceptions import ConnectionError as RequestsConnectionError, Timeout
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)


class HannaConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Hanna Instruments."""

VERSION = 1
data_schema = vol.Schema(
{vol.Required(CONF_EMAIL): str, vol.Required(CONF_PASSWORD): str}
)

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the setup flow."""

errors: dict[str, str] = {}

if user_input is not None:
await self.async_set_unique_id(user_input[CONF_EMAIL])
self._abort_if_unique_id_configured()
client = HannaCloudClient()
try:
await self.hass.async_add_executor_job(
client.authenticate,
user_input[CONF_EMAIL],
user_input[CONF_PASSWORD],
)
except (Timeout, RequestsConnectionError):
errors["base"] = "cannot_connect"
except AuthenticationError:
errors["base"] = "invalid_auth"

if not errors:
return self.async_create_entry(
title=user_input[CONF_EMAIL],
data=user_input,
)

return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
self.data_schema, user_input
),
errors=errors,
)
@@ -1,3 +0,0 @@
"""Constants for the Hanna integration."""

DOMAIN = "hanna"
@@ -1,72 +0,0 @@
"""Hanna Instruments data coordinator for Home Assistant.

This module provides the data coordinator for fetching and managing Hanna Instruments
sensor data.
"""

from datetime import timedelta
import logging
from typing import Any

from hanna_cloud import HannaCloudClient
from requests.exceptions import RequestException

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

type HannaConfigEntry = ConfigEntry[dict[str, HannaDataCoordinator]]

_LOGGER = logging.getLogger(__name__)


class HannaDataCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Coordinator for fetching Hanna sensor data."""

def __init__(
self,
hass: HomeAssistant,
config_entry: HannaConfigEntry,
device: dict[str, Any],
api_client: HannaCloudClient,
) -> None:
"""Initialize the Hanna data coordinator."""
self.api_client = api_client
self.device_data = device
super().__init__(
hass,
_LOGGER,
name=f"{DOMAIN}_{self.device_identifier}",
config_entry=config_entry,
update_interval=timedelta(seconds=30),
)

@property
def device_identifier(self) -> str:
"""Return the device identifier."""
return self.device_data["DID"]

def get_parameters(self) -> list[dict[str, Any]]:
"""Get all parameters from the sensor data."""
return self.api_client.parameters

def get_parameter_value(self, key: str) -> Any:
"""Get the value for a specific parameter."""
for parameter in self.get_parameters():
if parameter["name"] == key:
return parameter["value"]
return None

async def _async_update_data(self) -> dict[str, Any]:
"""Fetch latest sensor data from the Hanna API."""
try:
readings = await self.hass.async_add_executor_job(
self.api_client.get_last_device_reading, self.device_identifier
)
except RequestException as e:
raise UpdateFailed(f"Error communicating with Hanna API: {e}") from e
except (KeyError, IndexError) as e:
raise UpdateFailed(f"Error parsing Hanna API response: {e}") from e
return readings
@@ -1,28 +0,0 @@
"""Hanna Instruments entity base class for Home Assistant.

This module provides the base entity class for Hanna Instruments entities.
"""

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import HannaDataCoordinator


class HannaEntity(CoordinatorEntity[HannaDataCoordinator]):
"""Base class for Hanna entities."""

_attr_has_entity_name = True

def __init__(self, coordinator: HannaDataCoordinator) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, coordinator.device_identifier)},
manufacturer=coordinator.device_data.get("manufacturer"),
model=coordinator.device_data.get("DM"),
name=coordinator.device_data.get("name"),
serial_number=coordinator.device_data.get("serial_number"),
sw_version=coordinator.device_data.get("sw_version"),
)
@@ -1,10 +0,0 @@
{
"domain": "hanna",
"name": "Hanna",
"codeowners": ["@bestycame"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/hanna",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["hanna-cloud==0.0.6"]
}
@@ -1,70 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
This integration doesn't add actions.
appropriate-polling:
status: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done

# Silver
action-exceptions: todo
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
This integration does not have any configuration parameters.
docs-installation-parameters: done
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo
parallel-updates: todo
reauthentication-flow: todo
test-coverage: todo

# Gold
devices: done
diagnostics: todo
discovery-update-info: todo
discovery: todo
docs-data-update: done
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-device-class: done
entity-disabled-by-default: todo
entity-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo

# Platinum
async-dependency: todo
inject-websession: todo
strict-typing: todo
@@ -1,106 +0,0 @@
"""Hanna Instruments sensor integration for Home Assistant.

This module provides sensor entities for various Hanna Instruments devices,
including pH, ORP, temperature, and chemical sensors. It uses the Hanna API
to fetch readings and updates them periodically.
"""

from __future__ import annotations

import logging

from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import UnitOfElectricPotential, UnitOfTemperature, UnitOfVolume
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType

from .coordinator import HannaConfigEntry, HannaDataCoordinator
from .entity import HannaEntity

_LOGGER = logging.getLogger(__name__)

SENSOR_DESCRIPTIONS = [
SensorEntityDescription(
key="ph",
translation_key="ph_value",
device_class=SensorDeviceClass.PH,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="orp",
translation_key="chlorine_orp_value",
device_class=SensorDeviceClass.VOLTAGE,
native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="temp",
translation_key="water_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="airTemp",
translation_key="air_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="acidBase",
translation_key="ph_acid_base_flow_rate",
icon="mdi:chemical-weapon",
device_class=SensorDeviceClass.VOLUME,
native_unit_of_measurement=UnitOfVolume.MILLILITERS,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="cl",
translation_key="chlorine_flow_rate",
icon="mdi:chemical-weapon",
device_class=SensorDeviceClass.VOLUME,
native_unit_of_measurement=UnitOfVolume.MILLILITERS,
state_class=SensorStateClass.MEASUREMENT,
),
]

async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: HannaConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Hanna sensors from a config entry."""
device_coordinators = entry.runtime_data

async_add_entities(
HannaSensor(coordinator, description)
for description in SENSOR_DESCRIPTIONS
for coordinator in device_coordinators.values()
)


class HannaSensor(HannaEntity, SensorEntity):
"""Representation of a Hanna sensor."""

def __init__(
self,
coordinator: HannaDataCoordinator,
description: SensorEntityDescription,
) -> None:
"""Initialize a Hanna sensor."""
super().__init__(coordinator)
self._attr_unique_id = f"{coordinator.device_identifier}_{description.key}"
self.entity_description = description

@property
def native_value(self) -> StateType:
"""Return the value reported by the sensor."""
return self.coordinator.get_parameter_value(self.entity_description.key)
@@ -1,44 +0,0 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"user": {
"data": {
"email": "[%key:common::config_flow::data::email%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"email": "Email address for your Hanna Cloud account",
"password": "Password for your Hanna Cloud account"
},
"description": "Enter your Hanna Cloud credentials"
}
}
},
"entity": {
"sensor": {
"air_temperature": {
"name": "Air temperature"
},
"chlorine_flow_rate": {
"name": "Chlorine flow rate"
},
"chlorine_orp_value": {
"name": "Chlorine ORP value"
},
"ph_acid_base_flow_rate": {
"name": "pH Acid/Base flow rate"
},
"water_temperature": {
"name": "Water temperature"
}
}
}
}
@@ -3,8 +3,11 @@
from __future__ import annotations

import logging
import re
from typing import Any, cast

from stringcase import snakecase

from homeassistant.components.device_tracker import (
DOMAIN as DEVICE_TRACKER_DOMAIN,
ScannerEntity,
@@ -15,7 +18,6 @@ from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import snakecase

from . import Router
from .const import (
@@ -154,6 +156,22 @@ def async_add_new_entities(
async_add_entities(new_entities, True)


def _better_snakecase(text: str) -> str:
# Awaiting https://github.com/okunishinishi/python-stringcase/pull/18
if text == text.upper():
# All uppercase to all lowercase to get http for HTTP, not h_t_t_p
text = text.lower()
else:
# Three or more consecutive uppercase with middle part lowercased
# to get http_response for HTTPResponse, not h_t_t_p_response
text = re.sub(
r"([A-Z])([A-Z]+)([A-Z](?:[^A-Z]|$))",
lambda match: f"{match.group(1)}{match.group(2).lower()}{match.group(3)}",
text,
)
return cast(str, snakecase(text))


class HuaweiLteScannerEntity(HuaweiLteBaseEntity, ScannerEntity):
"""Huawei LTE router scanner entity."""

@@ -217,7 +235,7 @@ class HuaweiLteScannerEntity(HuaweiLteBaseEntity, ScannerEntity):
self._ip_address = (host.get("IpAddress") or "").split(";", 2)[0] or None
self._hostname = host.get("HostName")
self._extra_state_attributes = {
snakecase(k): v
_better_snakecase(k): v
for k, v in host.items()
if k
in {

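A quick, hedged illustration of what the _better_snakecase helper above is aiming for. This standalone sketch re-declares the helper so it can run outside Home Assistant and assumes the stringcase package (1.2.0) is installed; it is not part of the diff.

# Standalone sketch mirroring the _better_snakecase behaviour shown above.
import re
from stringcase import snakecase

def better_snakecase(text: str) -> str:
    """Snake-case text while keeping uppercase runs readable."""
    if text == text.upper():
        # HTTP -> http, not h_t_t_p
        text = text.lower()
    else:
        # HTTPResponse -> HttpResponse, so snakecase yields http_response
        text = re.sub(
            r"([A-Z])([A-Z]+)([A-Z](?:[^A-Z]|$))",
            lambda m: f"{m.group(1)}{m.group(2).lower()}{m.group(3)}",
            text,
        )
    return snakecase(text)

assert better_snakecase("HTTP") == "http"
assert better_snakecase("HTTPResponse") == "http_response"
assert better_snakecase("HostName") == "host_name"
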
@@ -6,7 +6,11 @@
"documentation": "https://www.home-assistant.io/integrations/huawei_lte",
"iot_class": "local_polling",
"loggers": ["huawei_lte_api.Session"],
"requirements": ["huawei-lte-api==1.11.0", "url-normalize==2.2.1"],
"requirements": [
"huawei-lte-api==1.11.0",
"stringcase==1.2.0",
"url-normalize==2.2.1"
],
"ssdp": [
{
"deviceType": "urn:schemas-upnp-org:device:InternetGatewayDevice:1",

@@ -13,8 +13,8 @@ rules:
status: todo
comment: See if we can catch more specific exceptions in get_device_info.
dependency-transparency:
status: done
comment: huawei-lte-api is from https://gitlab.salamek.cz/Mirrors/huawei-lte-api, see https://github.com/Salamek/huawei-lte-api/issues/253
status: todo
comment: stringcase is not built and published to PyPI from a public CI pipeline. huawei-lte-api is from https://gitlab.salamek.cz/Mirrors/huawei-lte-api, see https://github.com/Salamek/huawei-lte-api/issues/253
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
@@ -82,4 +82,5 @@ rules:
status: exempt
comment: Underlying huawei-lte-api does not use aiohttp or httpx, so this does not apply.
strict-typing:
status: done
status: todo
comment: Integration is strictly typechecked already, and huawei-lte-api and url-normalize are in order. stringcase is not typed.

@@ -62,5 +62,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/inkbird",
"iot_class": "local_push",
"requirements": ["inkbird-ble==1.1.1"]
"requirements": ["inkbird-ble==1.1.0"]
}

@@ -145,10 +145,10 @@
"loop": "Loop",
"off": "[%key:common::state::off%]",
"seconds_1": "1 second",
"seconds_2": "2 seconds",
"seconds_3": "3 seconds",
"seconds_4": "4 seconds",
"seconds_5": "5 seconds"
"seconds_2": "2 second",
"seconds_3": "3 second",
"seconds_4": "4 second",
"seconds_5": "5 second"
}
},
"min_dc_voltage_cells": {

@@ -11,11 +11,6 @@ from random import random

import voluptuous as vol

from homeassistant.components.labs import (
EVENT_LABS_UPDATED,
EventLabsUpdatedData,
async_is_preview_feature_enabled,
)
from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, get_instance
from homeassistant.components.recorder.models import (
StatisticData,
@@ -35,14 +30,10 @@ from homeassistant.const import (
UnitOfTemperature,
UnitOfVolume,
)
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.issue_registry import (
IssueSeverity,
async_create_issue,
async_delete_issue,
)
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
from homeassistant.util.unit_conversion import (
@@ -119,23 +110,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
# Notify backup listeners
hass.async_create_task(_notify_backup_listeners(hass), eager_start=False)

# Subscribe to labs feature updates for kitchen_sink preview repair
@callback
def _async_labs_updated(event: Event[EventLabsUpdatedData]) -> None:
"""Handle labs feature update event."""
if (
event.data["domain"] == "kitchen_sink"
and event.data["preview_feature"] == "special_repair"
):
_async_update_special_repair(hass)

entry.async_on_unload(
hass.bus.async_listen(EVENT_LABS_UPDATED, _async_labs_updated)
)

# Check if lab feature is currently enabled and create repair if so
_async_update_special_repair(hass)

return True


@@ -163,27 +137,6 @@ async def async_remove_config_entry_device(
return True


@callback
def _async_update_special_repair(hass: HomeAssistant) -> None:
"""Create or delete the special repair issue.

Creates a repair issue when the special_repair lab feature is enabled,
and deletes it when disabled. This demonstrates how lab features can interact
with Home Assistant's repair system.
"""
if async_is_preview_feature_enabled(hass, DOMAIN, "special_repair"):
async_create_issue(
hass,
DOMAIN,
"kitchen_sink_special_repair_issue",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="special_repair",
)
else:
async_delete_issue(hass, DOMAIN, "kitchen_sink_special_repair_issue")


async def _notify_backup_listeners(hass: HomeAssistant) -> None:
for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
listener()

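For context, the kitchen_sink listener above only reacts to events whose data matches its own preview feature. A sketch of the event payload it filters on, based on the EventLabsUpdatedData definition that appears later in this diff:

# Shape of the labs_updated event data the kitchen_sink listener checks
# (mirrors the EventLabsUpdatedData TypedDict removed later in this diff).
event_data = {
    "domain": "kitchen_sink",
    "preview_feature": "special_repair",
    "enabled": True,
}
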
@@ -5,13 +5,6 @@
"codeowners": ["@home-assistant/core"],
"documentation": "https://www.home-assistant.io/integrations/kitchen_sink",
"iot_class": "calculated",
"preview_features": {
"special_repair": {
"feedback_url": "https://community.home-assistant.io",
"learn_more_url": "https://www.home-assistant.io/integrations/kitchen_sink",
"report_issue_url": "https://github.com/home-assistant/core/issues/new?template=bug_report.yml&integration_link=https://www.home-assistant.io/integrations/kitchen_sink&integration_name=Kitchen%20Sink"
}
},
"quality_scale": "internal",
"single_config_entry": true
}

@@ -71,10 +71,6 @@
},
"title": "The blinker fluid is empty and needs to be refilled"
},
"special_repair": {
"description": "This is a special repair created by a preview feature! This demonstrates how lab features can interact with the Home Assistant repair system. You can disable this by turning off the kitchen sink special repair feature in Settings > System > Labs.",
"title": "Special repair feature preview"
},
"transmogrifier_deprecated": {
"description": "The transmogrifier component is now deprecated due to the lack of local control available in the new API",
"title": "The transmogrifier component is deprecated"
@@ -107,14 +103,6 @@
}
}
},
"preview_features": {
"special_repair": {
"description": "Creates a **special repair issue** when enabled.\n\nThis demonstrates how lab features can interact with other Home Assistant integrations.",
"disable_confirmation": "This will remove the special repair issue. Don't worry, this is just a demonstration feature.",
"enable_confirmation": "This will create a special repair issue to demonstrate Labs preview features. This is just an example and won't affect your actual system.",
"name": "Special repair"
}
},
"services": {
"test_service_1": {
"description": "Fake action for testing",

@@ -11,7 +11,7 @@
"loggers": ["xknx", "xknxproject"],
"quality_scale": "silver",
"requirements": [
"xknx==3.11.0",
"xknx==3.10.1",
"xknxproject==3.8.2",
"knx-frontend==2025.10.31.195356"
],

@@ -279,7 +279,7 @@ LIGHT_KNX_SCHEMA = AllSerializeFirst(
translation_key="hsv_addresses",
schema={
vol.Required(CONF_GA_HUE): GASelector(
write_required=True, valid_dpt="5.003"
write_required=True, valid_dpt="5.001"
),
vol.Required(CONF_GA_SATURATION): GASelector(
write_required=True, valid_dpt="5.001"

@@ -1,310 +0,0 @@
"""The Home Assistant Labs integration.

This integration provides preview features that can be toggled on/off by users.
Integrations can register lab preview features in their manifest.json which will appear
in the Home Assistant Labs UI for users to enable or disable.
"""

from __future__ import annotations

import logging
from typing import Any

import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.components.backup import async_get_manager
from homeassistant.core import HomeAssistant, callback
from homeassistant.generated.labs import LABS_PREVIEW_FEATURES
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import async_get_custom_components

from .const import (
DOMAIN,
EVENT_LABS_UPDATED,
LABS_DATA,
STORAGE_KEY,
STORAGE_VERSION,
EventLabsUpdatedData,
LabPreviewFeature,
LabsData,
LabsStoreData,
)

_LOGGER = logging.getLogger(__name__)

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

__all__ = [
"EVENT_LABS_UPDATED",
"EventLabsUpdatedData",
"async_is_preview_feature_enabled",
]


class LabsStorage(Store[LabsStoreData]):
"""Custom Store for Labs that converts between runtime and storage formats.

Runtime format: {"preview_feature_status": {(domain, preview_feature)}}
Storage format: {"preview_feature_status": [{"domain": str, "preview_feature": str}]}

Only enabled features are saved to storage - if stored, it's enabled.
"""

async def _async_load_data(self) -> LabsStoreData | None:
"""Load data and convert from storage format to runtime format."""
raw_data = await super()._async_load_data()
if raw_data is None:
return None

status_list = raw_data.get("preview_feature_status", [])

# Convert list of objects to runtime set - if stored, it's enabled
return {
"preview_feature_status": {
(item["domain"], item["preview_feature"]) for item in status_list
}
}

def _write_data(self, path: str, data: dict) -> None:
"""Convert from runtime format to storage format and write.

Only saves enabled features - disabled is the default.
"""
# Extract the actual data (has version/key wrapper)
actual_data = data.get("data", data)

# Check if this is Labs data (has preview_feature_status key)
if "preview_feature_status" not in actual_data:
# Not Labs data, write as-is
super()._write_data(path, data)
return

preview_status = actual_data["preview_feature_status"]

# Convert from runtime format (set of tuples) to storage format (list of dicts)
status_list = [
{"domain": domain, "preview_feature": preview_feature}
for domain, preview_feature in preview_status
]

# Build the final data structure with converted format
data_copy = data.copy()
data_copy["data"] = {"preview_feature_status": status_list}

super()._write_data(path, data_copy)

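A hedged illustration of the runtime/storage conversion described in the LabsStorage docstring above; plain Python, no Home Assistant imports needed, and not part of the removed file itself.

# Runtime format: a set of (domain, preview_feature) tuples.
runtime = {"preview_feature_status": {("kitchen_sink", "special_repair")}}

# Storage format: a JSON-serializable list of objects; only enabled features are stored.
stored = {
    "preview_feature_status": [
        {"domain": domain, "preview_feature": feature}
        for domain, feature in runtime["preview_feature_status"]
    ]
}

# Loading converts back to the runtime set.
loaded = {
    "preview_feature_status": {
        (item["domain"], item["preview_feature"])
        for item in stored["preview_feature_status"]
    }
}
assert loaded == runtime
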
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Labs component."""
store = LabsStorage(hass, STORAGE_VERSION, STORAGE_KEY, private=True)
data = await store.async_load()

if data is None:
data = {"preview_feature_status": set()}

# Scan ALL integrations for lab preview features (loaded or not)
lab_preview_features = await _async_scan_all_preview_features(hass)

# Clean up preview features that no longer exist
if lab_preview_features:
valid_keys = {
(pf.domain, pf.preview_feature) for pf in lab_preview_features.values()
}
stale_keys = data["preview_feature_status"] - valid_keys

if stale_keys:
_LOGGER.debug(
"Removing %d stale preview features: %s",
len(stale_keys),
stale_keys,
)
data["preview_feature_status"] -= stale_keys

await store.async_save(data)

hass.data[LABS_DATA] = LabsData(
store=store,
data=data,
preview_features=lab_preview_features,
)

websocket_api.async_register_command(hass, websocket_list_preview_features)
websocket_api.async_register_command(hass, websocket_update_preview_feature)

return True


def _populate_preview_features(
preview_features: dict[str, LabPreviewFeature],
domain: str,
labs_preview_features: dict[str, dict[str, str]],
is_built_in: bool = True,
) -> None:
"""Populate preview features dictionary from integration preview_features.

Args:
preview_features: Dictionary to populate
domain: Integration domain
labs_preview_features: Dictionary of preview feature definitions from manifest
is_built_in: Whether this is a built-in integration
"""
for preview_feature_key, preview_feature_data in labs_preview_features.items():
preview_feature = LabPreviewFeature(
domain=domain,
preview_feature=preview_feature_key,
is_built_in=is_built_in,
feedback_url=preview_feature_data.get("feedback_url"),
learn_more_url=preview_feature_data.get("learn_more_url"),
report_issue_url=preview_feature_data.get("report_issue_url"),
)
preview_features[preview_feature.full_key] = preview_feature


async def _async_scan_all_preview_features(
hass: HomeAssistant,
) -> dict[str, LabPreviewFeature]:
"""Scan ALL available integrations for lab preview features (loaded or not)."""
preview_features: dict[str, LabPreviewFeature] = {}

# Load pre-generated built-in lab preview features (already includes all data)
for domain, domain_preview_features in LABS_PREVIEW_FEATURES.items():
_populate_preview_features(
preview_features, domain, domain_preview_features, is_built_in=True
)

# Scan custom components
custom_integrations = await async_get_custom_components(hass)
_LOGGER.debug(
"Loaded %d built-in + scanning %d custom integrations for lab preview features",
len(preview_features),
len(custom_integrations),
)

for integration in custom_integrations.values():
if labs_preview_features := integration.preview_features:
_populate_preview_features(
preview_features,
integration.domain,
labs_preview_features,
is_built_in=False,
)

_LOGGER.debug("Loaded %d total lab preview features", len(preview_features))
return preview_features


@callback
def async_is_preview_feature_enabled(
hass: HomeAssistant, domain: str, preview_feature: str
) -> bool:
"""Check if a lab preview feature is enabled.

Args:
hass: HomeAssistant instance
domain: Integration domain
preview_feature: Preview feature name

Returns:
True if the preview feature is enabled, False otherwise
"""
if LABS_DATA not in hass.data:
return False

labs_data = hass.data[LABS_DATA]
return (domain, preview_feature) in labs_data.data["preview_feature_status"]


@callback
@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "labs/list"})
def websocket_list_preview_features(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""List all lab preview features filtered by loaded integrations."""
labs_data = hass.data[LABS_DATA]
loaded_components = hass.config.components

preview_features: list[dict[str, Any]] = [
preview_feature.to_dict(
(preview_feature.domain, preview_feature.preview_feature)
in labs_data.data["preview_feature_status"]
)
for preview_feature_key, preview_feature in labs_data.preview_features.items()
if preview_feature.domain in loaded_components
]

connection.send_result(msg["id"], {"features": preview_features})


@websocket_api.require_admin
@websocket_api.websocket_command(
{
vol.Required("type"): "labs/update",
vol.Required("domain"): str,
vol.Required("preview_feature"): str,
vol.Required("enabled"): bool,
vol.Optional("create_backup", default=False): bool,
}
)
@websocket_api.async_response
async def websocket_update_preview_feature(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Update a lab preview feature state."""
domain = msg["domain"]
preview_feature = msg["preview_feature"]
enabled = msg["enabled"]
create_backup = msg["create_backup"]

labs_data = hass.data[LABS_DATA]

# Build preview_feature_id for lookup
preview_feature_id = f"{domain}.{preview_feature}"

# Validate preview feature exists
if preview_feature_id not in labs_data.preview_features:
connection.send_error(
msg["id"],
websocket_api.ERR_NOT_FOUND,
f"Preview feature {preview_feature_id} not found",
)
return

# Create backup if requested and enabling
if create_backup and enabled:
try:
backup_manager = async_get_manager(hass)
await backup_manager.async_create_automatic_backup()
except Exception as err:  # noqa: BLE001 - websocket handlers can catch broad exceptions
connection.send_error(
msg["id"],
websocket_api.ERR_UNKNOWN_ERROR,
f"Error creating backup: {err}",
)
return

# Update storage (only store enabled features, remove if disabled)
if enabled:
labs_data.data["preview_feature_status"].add((domain, preview_feature))
else:
labs_data.data["preview_feature_status"].discard((domain, preview_feature))

# Save changes immediately
await labs_data.store.async_save(labs_data.data)

# Fire event
event_data: EventLabsUpdatedData = {
"domain": domain,
"preview_feature": preview_feature,
"enabled": enabled,
}
hass.bus.async_fire(EVENT_LABS_UPDATED, event_data)

connection.send_result(msg["id"])
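For reference, a client toggling a preview feature over the websocket API would send a message along these lines. This is only an illustrative sketch; the field names follow the labs/update command schema above, and the id field is the standard websocket message counter.

# Example labs/update websocket message matching the schema above.
message = {
    "id": 42,
    "type": "labs/update",
    "domain": "kitchen_sink",
    "preview_feature": "special_repair",
    "enabled": True,
    "create_backup": False,
}
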
@@ -1,77 +0,0 @@
"""Constants for the Home Assistant Labs integration."""

from __future__ import annotations

from dataclasses import dataclass, field
from typing import TYPE_CHECKING, TypedDict

from homeassistant.util.hass_dict import HassKey

if TYPE_CHECKING:
from homeassistant.helpers.storage import Store

DOMAIN = "labs"

STORAGE_KEY = "core.labs"
STORAGE_VERSION = 1

EVENT_LABS_UPDATED = "labs_updated"


class EventLabsUpdatedData(TypedDict):
"""Event data for labs_updated event."""

domain: str
preview_feature: str
enabled: bool


@dataclass(frozen=True, kw_only=True, slots=True)
class LabPreviewFeature:
"""Lab preview feature definition."""

domain: str
preview_feature: str
is_built_in: bool = True
feedback_url: str | None = None
learn_more_url: str | None = None
report_issue_url: str | None = None

@property
def full_key(self) -> str:
"""Return the full key for the preview feature (domain.preview_feature)."""
return f"{self.domain}.{self.preview_feature}"

def to_dict(self, enabled: bool) -> dict[str, str | bool | None]:
"""Return a serialized version of the preview feature.

Args:
enabled: Whether the preview feature is currently enabled

Returns:
Dictionary with preview feature data including enabled status
"""
return {
"preview_feature": self.preview_feature,
"domain": self.domain,
"enabled": enabled,
"is_built_in": self.is_built_in,
"feedback_url": self.feedback_url,
"learn_more_url": self.learn_more_url,
"report_issue_url": self.report_issue_url,
}


type LabsStoreData = dict[str, set[tuple[str, str]]]


@dataclass
class LabsData:
"""Storage class for Labs global data."""

store: Store[LabsStoreData]
data: LabsStoreData
preview_features: dict[str, LabPreviewFeature] = field(default_factory=dict)


LABS_DATA: HassKey[LabsData] = HassKey(DOMAIN)
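A sketch of what to_dict would return for the kitchen_sink feature defined elsewhere in this diff; the URL values are taken from that manifest, and enabled is whatever the caller passes. Illustrative only.

# Example to_dict(enabled=True) result for the kitchen_sink special_repair feature.
serialized = {
    "preview_feature": "special_repair",
    "domain": "kitchen_sink",
    "enabled": True,
    "is_built_in": True,
    "feedback_url": "https://community.home-assistant.io",
    "learn_more_url": "https://www.home-assistant.io/integrations/kitchen_sink",
    "report_issue_url": "https://github.com/home-assistant/core/issues/new?template=bug_report.yml&integration_link=https://www.home-assistant.io/integrations/kitchen_sink&integration_name=Kitchen%20Sink",
}
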
@@ -1,9 +0,0 @@
{
"domain": "labs",
"name": "Home Assistant Labs",
"codeowners": ["@home-assistant/core"],
"documentation": "https://www.home-assistant.io/integrations/labs",
"integration_type": "system",
"iot_class": "calculated",
"quality_scale": "internal"
}
@@ -1,3 +0,0 @@
{
"title": "Home Assistant Labs"
}
@@ -15,20 +15,16 @@ from pylamarzocco.const import FirmwareType
from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful
from pylamarzocco.util import InstallationKey, generate_installation_key

from homeassistant.components.bluetooth import (
async_ble_device_from_address,
async_discovered_service_info,
)
from homeassistant.components.bluetooth import async_discovered_service_info
from homeassistant.const import (
CONF_MAC,
CONF_PASSWORD,
CONF_TOKEN,
CONF_USERNAME,
EVENT_HOMEASSISTANT_STOP,
Platform,
__version__,
)
from homeassistant.core import Event, HomeAssistant
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers.aiohttp_client import async_create_clientsession
@@ -103,7 +99,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
# initialize Bluetooth
bluetooth_client: LaMarzoccoBluetoothClient | None = None
if entry.options.get(CONF_USE_BLUETOOTH, True) and (
token := (entry.data.get(CONF_TOKEN) or settings.ble_auth_token)
token := settings.ble_auth_token
):
if CONF_MAC not in entry.data:
for discovery_info in async_discovered_service_info(hass):
@@ -112,7 +108,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
and name.startswith(BT_MODEL_PREFIXES)
and name.split("_")[1] == serial
):
_LOGGER.info("Found lamarzocco Bluetooth device, adding to entry")
_LOGGER.debug("Found Bluetooth device, configuring with Bluetooth")
# found a device, add MAC address to config entry
hass.config_entries.async_update_entry(
entry,
@@ -122,29 +118,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
},
)

if not entry.data[CONF_TOKEN]:
# update the token in the config entry
hass.config_entries.async_update_entry(
entry,
data={
**entry.data,
CONF_TOKEN: token,
},
)

if CONF_MAC in entry.data:
ble_device = async_ble_device_from_address(hass, entry.data[CONF_MAC])
if ble_device:
_LOGGER.info("Setting up lamarzocco with Bluetooth")
bluetooth_client = LaMarzoccoBluetoothClient(
ble_device=ble_device,
ble_token=token,
)

async def disconnect_bluetooth(_: Event) -> None:
"""Stop push updates when hass stops."""
await bluetooth_client.disconnect()

entry.async_on_unload(
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, disconnect_bluetooth
)
)
entry.async_on_unload(bluetooth_client.disconnect)
else:
_LOGGER.info(
"Bluetooth device not found during lamarzocco setup, continuing with cloud only"
)
_LOGGER.debug("Initializing Bluetooth device")
bluetooth_client = LaMarzoccoBluetoothClient(
address_or_ble_device=entry.data[CONF_MAC],
ble_token=token,
)

device = LaMarzoccoMachine(
serial_number=entry.unique_id,

@@ -4,7 +4,6 @@ from __future__ import annotations

from abc import abstractmethod
from asyncio import Task
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from datetime import timedelta
import logging
@@ -15,7 +14,7 @@ from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -73,12 +72,10 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
return True
return self._websocket_task.done()

async def __handle_internal_update(
self, func: Callable[[], Coroutine[Any, Any, None]]
) -> None:
"""Handle update with error handling."""
async def _async_update_data(self) -> None:
"""Do the data update."""
try:
await func()
await self._internal_async_update_data()
except AuthFail as ex:
_LOGGER.debug("Authentication failed", exc_info=True)
raise ConfigEntryAuthFailed(
@@ -90,17 +87,6 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
translation_domain=DOMAIN, translation_key="api_error"
) from ex

async def _async_setup(self) -> None:
"""Set up coordinator."""
await self.__handle_internal_update(self._internal_async_setup)

async def _async_update_data(self) -> None:
"""Do the data update."""
await self.__handle_internal_update(self._internal_async_update_data)

async def _internal_async_setup(self) -> None:
"""Actual setup logic."""

@abstractmethod
async def _internal_async_update_data(self) -> None:
"""Actual data update logic."""
@@ -111,12 +97,6 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):

cloud_client: LaMarzoccoCloudClient

async def _internal_async_setup(self) -> None:
"""Set up the coordinator."""
await self.cloud_client.async_get_access_token()
await self.device.get_dashboard()
_LOGGER.debug("Current status: %s", self.device.dashboard.to_dict())

async def _internal_async_update_data(self) -> None:
"""Fetch data from API endpoint."""

@@ -127,6 +107,9 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):
if self.device.websocket.connected and not self.websocket_terminated:
return

await self.device.get_dashboard()
_LOGGER.debug("Current status: %s", self.device.dashboard.to_dict())

self._websocket_task = self.config_entry.async_create_background_task(
hass=self.hass,
target=self.connect_websocket(),
@@ -148,13 +131,8 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):

self.async_update_listeners()

@callback
def update_callback(_: Any | None = None) -> None:
_LOGGER.debug("Current status: %s", self.device.dashboard.to_dict())
self.async_set_updated_data(None)

await self.device.connect_dashboard_websocket(
update_callback=update_callback,
update_callback=lambda _: self.async_set_updated_data(None),
connect_callback=self.async_update_listeners,
disconnect_callback=self.async_update_listeners,
)

@@ -37,5 +37,5 @@
"iot_class": "cloud_push",
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"requirements": ["pylamarzocco==2.2.0"]
"requirements": ["pylamarzocco==2.1.3"]
}

@@ -73,17 +73,14 @@ class LcnBinarySensor(LcnEntity, BinarySensorEntity):

async def async_update(self) -> None:
"""Update the state of the entity."""
self._attr_available = (
await self.device_connection.request_status_binary_sensors(
SCAN_INTERVAL.seconds
)
is not None
await self.device_connection.request_status_binary_sensors(
SCAN_INTERVAL.seconds
)

def input_received(self, input_obj: InputType) -> None:
"""Set sensor value when LCN input object (command) is received."""
if not isinstance(input_obj, pypck.inputs.ModStatusBinSensors):
return
self._attr_available = True

self._attr_is_on = input_obj.get_state(self.bin_sensor_port.value)
self.async_write_ha_state()

@@ -171,22 +171,20 @@ class LcnClimate(LcnEntity, ClimateEntity):

async def async_update(self) -> None:
"""Update the state of the entity."""
self._attr_available = any(
await asyncio.gather(
self.device_connection.request_status_variable(
self.variable, SCAN_INTERVAL.seconds
),
self.device_connection.request_status_variable(
self.setpoint, SCAN_INTERVAL.seconds
),
)
await asyncio.gather(
self.device_connection.request_status_variable(
self.variable, SCAN_INTERVAL.seconds
),
self.device_connection.request_status_variable(
self.setpoint, SCAN_INTERVAL.seconds
),
)

def input_received(self, input_obj: InputType) -> None:
"""Set temperature value when LCN input object is received."""
if not isinstance(input_obj, pypck.inputs.ModStatusVar):
return
self._attr_available = True

if input_obj.get_var() == self.variable:
self._attr_current_temperature = float(
input_obj.get_value().to_var_unit(self.unit)

@@ -133,15 +133,13 @@ class LcnOutputsCover(LcnEntity, CoverEntity):
async def async_update(self) -> None:
"""Update the state of the entity."""
if not self.device_connection.is_group:
self._attr_available = any(
await asyncio.gather(
self.device_connection.request_status_output(
pypck.lcn_defs.OutputPort["OUTPUTUP"], SCAN_INTERVAL.seconds
),
self.device_connection.request_status_output(
pypck.lcn_defs.OutputPort["OUTPUTDOWN"], SCAN_INTERVAL.seconds
),
)
await asyncio.gather(
self.device_connection.request_status_output(
pypck.lcn_defs.OutputPort["OUTPUTUP"], SCAN_INTERVAL.seconds
),
self.device_connection.request_status_output(
pypck.lcn_defs.OutputPort["OUTPUTDOWN"], SCAN_INTERVAL.seconds
),
)

def input_received(self, input_obj: InputType) -> None:
@@ -151,7 +149,7 @@ class LcnOutputsCover(LcnEntity, CoverEntity):
or input_obj.get_output_id() not in self.output_ids
):
return
self._attr_available = True

if input_obj.get_percent() > 0:  # motor is on
if input_obj.get_output_id() == self.output_ids[0]:
self._attr_is_opening = True
@@ -274,12 +272,11 @@ class LcnRelayCover(LcnEntity, CoverEntity):
self.motor, self.positioning_mode, SCAN_INTERVAL.seconds
)
)
self._attr_available = any(await asyncio.gather(*coros))
await asyncio.gather(*coros)

def input_received(self, input_obj: InputType) -> None:
"""Set cover states when LCN input object (command) is received."""
if isinstance(input_obj, pypck.inputs.ModStatusRelays):
self._attr_available = True
self._attr_is_opening = input_obj.is_opening(self.motor.value)
self._attr_is_closing = input_obj.is_closing(self.motor.value)

@@ -296,7 +293,6 @@ class LcnRelayCover(LcnEntity, CoverEntity):
)
and input_obj.motor == self.motor.value
):
self._attr_available = True
self._attr_current_cover_position = int(input_obj.position)
if self._attr_current_cover_position in [0, 100]:
self._attr_is_opening = False

@@ -149,11 +149,8 @@ class LcnOutputLight(LcnEntity, LightEntity):

async def async_update(self) -> None:
"""Update the state of the entity."""
self._attr_available = (
await self.device_connection.request_status_output(
self.output, SCAN_INTERVAL.seconds
)
is not None
await self.device_connection.request_status_output(
self.output, SCAN_INTERVAL.seconds
)

def input_received(self, input_obj: InputType) -> None:
@@ -163,7 +160,7 @@ class LcnOutputLight(LcnEntity, LightEntity):
or input_obj.get_output_id() != self.output.value
):
return
self._attr_available = True

percent = input_obj.get_percent()
self._attr_brightness = value_to_brightness(BRIGHTNESS_SCALE, percent)
self._attr_is_on = bool(percent)
@@ -203,15 +200,12 @@ class LcnRelayLight(LcnEntity, LightEntity):

async def async_update(self) -> None:
"""Update the state of the entity."""
self._attr_available = (
await self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)
is not None
)
await self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)

def input_received(self, input_obj: InputType) -> None:
"""Set light state when LCN input object (command) is received."""
if not isinstance(input_obj, pypck.inputs.ModStatusRelays):
return
self._attr_available = True

self._attr_is_on = input_obj.get_state(self.output.value)
self.async_write_ha_state()

@@ -25,7 +25,7 @@ rules:
status: exempt
comment: Integration has no configuration parameters
docs-installation-parameters: done
entity-unavailable: done
entity-unavailable: todo
integration-owner: done
log-when-unavailable: done
parallel-updates: done

@@ -133,11 +133,8 @@ class LcnVariableSensor(LcnEntity, SensorEntity):

async def async_update(self) -> None:
"""Update the state of the entity."""
self._attr_available = (
await self.device_connection.request_status_variable(
self.variable, SCAN_INTERVAL.seconds
)
is not None
await self.device_connection.request_status_variable(
self.variable, SCAN_INTERVAL.seconds
)

def input_received(self, input_obj: InputType) -> None:
@@ -147,7 +144,7 @@ class LcnVariableSensor(LcnEntity, SensorEntity):
or input_obj.get_var() != self.variable
):
return
self._attr_available = True

is_regulator = self.variable.name in SETPOINTS
self._attr_native_value = input_obj.get_value().to_var_unit(
self.unit, is_regulator
@@ -174,18 +171,15 @@ class LcnLedLogicSensor(LcnEntity, SensorEntity):

async def async_update(self) -> None:
"""Update the state of the entity."""
self._attr_available = (
await self.device_connection.request_status_led_and_logic_ops(
SCAN_INTERVAL.seconds
)
is not None
await self.device_connection.request_status_led_and_logic_ops(
SCAN_INTERVAL.seconds
)

def input_received(self, input_obj: InputType) -> None:
"""Set sensor value when LCN input object (command) is received."""
if not isinstance(input_obj, pypck.inputs.ModStatusLedsAndLogicOps):
return
self._attr_available = True

if self.source in pypck.lcn_defs.LedPort:
self._attr_native_value = input_obj.get_led_state(
self.source.value

@@ -95,11 +95,8 @@ class LcnOutputSwitch(LcnEntity, SwitchEntity):

async def async_update(self) -> None:
"""Update the state of the entity."""
self._attr_available = (
await self.device_connection.request_status_output(
self.output, SCAN_INTERVAL.seconds
)
is not None
await self.device_connection.request_status_output(
self.output, SCAN_INTERVAL.seconds
)

def input_received(self, input_obj: InputType) -> None:
@@ -109,7 +106,7 @@ class LcnOutputSwitch(LcnEntity, SwitchEntity):
or input_obj.get_output_id() != self.output.value
):
return
self._attr_available = True

self._attr_is_on = input_obj.get_percent() > 0
self.async_write_ha_state()

@@ -145,16 +142,13 @@ class LcnRelaySwitch(LcnEntity, SwitchEntity):

async def async_update(self) -> None:
"""Update the state of the entity."""
self._attr_available = (
await self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)
is not None
)
await self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)

def input_received(self, input_obj: InputType) -> None:
"""Set switch state when LCN input object (command) is received."""
if not isinstance(input_obj, pypck.inputs.ModStatusRelays):
return
self._attr_available = True

self._attr_is_on = input_obj.get_state(self.output.value)
self.async_write_ha_state()

@@ -189,11 +183,8 @@ class LcnRegulatorLockSwitch(LcnEntity, SwitchEntity):

async def async_update(self) -> None:
"""Update the state of the entity."""
self._attr_available = (
await self.device_connection.request_status_variable(
self.setpoint_variable, SCAN_INTERVAL.seconds
)
is not None
await self.device_connection.request_status_variable(
self.setpoint_variable, SCAN_INTERVAL.seconds
)

def input_received(self, input_obj: InputType) -> None:
@@ -203,7 +194,7 @@ class LcnRegulatorLockSwitch(LcnEntity, SwitchEntity):
or input_obj.get_var() != self.setpoint_variable
):
return
self._attr_available = True

self._attr_is_on = input_obj.get_value().is_locked_regulator()
self.async_write_ha_state()

@@ -245,12 +236,7 @@ class LcnKeyLockSwitch(LcnEntity, SwitchEntity):

async def async_update(self) -> None:
"""Update the state of the entity."""
self._attr_available = (
await self.device_connection.request_status_locked_keys(
SCAN_INTERVAL.seconds
)
is not None
)
await self.device_connection.request_status_locked_keys(SCAN_INTERVAL.seconds)

def input_received(self, input_obj: InputType) -> None:
"""Set switch state when LCN input object (command) is received."""
@@ -259,6 +245,6 @@ class LcnKeyLockSwitch(LcnEntity, SwitchEntity):
or self.key not in pypck.lcn_defs.Key
):
return
self._attr_available = True

self._attr_is_on = input_obj.get_state(self.table_id, self.key_id)
self.async_write_ha_state()

@@ -341,7 +341,7 @@ class LIFXLight(LIFXEntity, LightEntity):
async def async_added_to_hass(self) -> None:
"""Register callbacks."""
self.async_on_remove(
self.manager.async_register_entity(self.entity_id, self.coordinator)
self.manager.async_register_entity(self.entity_id, self.entry.entry_id)
)
return await super().async_added_to_hass()


@@ -34,7 +34,7 @@ from homeassistant.helpers.target import (
)

from .const import _ATTR_COLOR_TEMP, ATTR_THEME, DOMAIN
from .coordinator import LIFXUpdateCoordinator
from .coordinator import LIFXConfigEntry, LIFXUpdateCoordinator
from .util import convert_8_to_16, find_hsbk

if TYPE_CHECKING:
@@ -243,7 +243,7 @@ class LIFXManager:
"""Initialize the manager."""
self.hass = hass
self.effects_conductor = aiolifx_effects.Conductor(hass.loop)
self.entity_id_to_coordinator: dict[str, LIFXUpdateCoordinator] = {}
self.entry_id_to_entity_id: dict[str, str] = {}

@callback
def async_unload(self) -> None:
@@ -253,15 +253,15 @@ class LIFXManager:

@callback
def async_register_entity(
self, entity_id: str, coordinator: LIFXUpdateCoordinator
self, entity_id: str, entry_id: str
) -> Callable[[], None]:
"""Register an entity to the config entry id."""
self.entity_id_to_coordinator[entity_id] = coordinator
self.entry_id_to_entity_id[entry_id] = entity_id

@callback
def unregister_entity() -> None:
"""Unregister entity when it is being destroyed."""
self.entity_id_to_coordinator.pop(entity_id)
self.entry_id_to_entity_id.pop(entry_id)

return unregister_entity

@@ -499,11 +499,10 @@ class LIFXManager:
coordinators: list[LIFXUpdateCoordinator] = []
bulbs: list[Light] = []

coordinators = [
coordinator
for entity_id, coordinator in self.entity_id_to_coordinator.items()
if entity_id in entity_ids
]
bulbs = [coordinator.device for coordinator in coordinators]
entry: LIFXConfigEntry
for entry in self.hass.config_entries.async_loaded_entries(DOMAIN):
if self.entry_id_to_entity_id[entry.entry_id] in entity_ids:
coordinators.append(entry.runtime_data)
bulbs.append(entry.runtime_data.device)
if start_effect_func := self._effect_dispatch.get(service):
await start_effect_func(self, bulbs, coordinators, **kwargs)

@@ -14,7 +14,6 @@ DEFAULT_NAME: Final = "Mastodon"

ATTR_STATUS = "status"
ATTR_VISIBILITY = "visibility"
ATTR_IDEMPOTENCY_KEY = "idempotency_key"
ATTR_CONTENT_WARNING = "content_warning"
ATTR_MEDIA_WARNING = "media_warning"
ATTR_MEDIA = "media"

@@ -15,7 +15,6 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError

from .const import (
ATTR_CONTENT_WARNING,
ATTR_IDEMPOTENCY_KEY,
ATTR_LANGUAGE,
ATTR_MEDIA,
ATTR_MEDIA_DESCRIPTION,
@@ -43,7 +42,6 @@ SERVICE_POST_SCHEMA = vol.Schema(
vol.Required(ATTR_CONFIG_ENTRY_ID): str,
vol.Required(ATTR_STATUS): str,
vol.Optional(ATTR_VISIBILITY): vol.In([x.lower() for x in StatusVisibility]),
vol.Optional(ATTR_IDEMPOTENCY_KEY): str,
vol.Optional(ATTR_CONTENT_WARNING): str,
vol.Optional(ATTR_LANGUAGE): str,
vol.Optional(ATTR_MEDIA): str,
@@ -79,33 +77,25 @@ def async_setup_services(hass: HomeAssistant) -> None:
entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID])
client = entry.runtime_data.client

status: str = call.data[ATTR_STATUS]
status = call.data[ATTR_STATUS]

visibility: str | None = (
StatusVisibility(call.data[ATTR_VISIBILITY])
if ATTR_VISIBILITY in call.data
else None
)
idempotency_key: str | None = call.data.get(ATTR_IDEMPOTENCY_KEY)
spoiler_text: str | None = call.data.get(ATTR_CONTENT_WARNING)
language: str | None = call.data.get(ATTR_LANGUAGE)
media_path: str | None = call.data.get(ATTR_MEDIA)
media_description: str | None = call.data.get(ATTR_MEDIA_DESCRIPTION)
media_warning: str | None = call.data.get(ATTR_MEDIA_WARNING)

if idempotency_key and len(idempotency_key) < 4:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="idempotency_key_too_short",
)

await hass.async_add_executor_job(
partial(
_post,
client=client,
status=status,
visibility=visibility,
idempotency_key=idempotency_key,
spoiler_text=spoiler_text,
language=language,
media_path=media_path,

@@ -18,9 +18,6 @@ post:
- private
- direct
translation_key: post_visibility
idempotency_key:
selector:
text:
content_warning:
selector:
text:

@@ -42,9 +42,6 @@
}
},
"exceptions": {
"idempotency_key_too_short": {
"message": "Idempotency key must be at least 4 characters long."
},
"integration_not_found": {
"message": "Integration \"{target}\" not found in registry."
},
@@ -83,10 +80,6 @@
"description": "A content warning will be shown before the status text is shown (default: no content warning).",
"name": "Content warning"
},
"idempotency_key": {
"description": "A unique key for this post. If specified then subsequent posts with the same key will be ignored by your Mastodon instance. Mastodon holds keys for up to one hour (default: no idempotency key, which allows duplication).",
"name": "Idempotency key"
},
"language": {
"description": "The language of the post (default: Mastodon account preference).",
"name": "Language"

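A hedged example of calling the Mastodon post action with the idempotency_key field described above. The field names follow the service schema and strings in this diff; the config entry id and key value are placeholders, not real data.

# Illustrative service call data for the mastodon post action.
service_data = {
    "config_entry_id": "abc123",  # placeholder config entry id
    "status": "Good morning from Home Assistant!",
    "idempotency_key": "morning-post-2025-01-01",  # must be at least 4 characters
}
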
@@ -58,7 +58,7 @@ DISCOVERY_SCHEMAS = [
platform=Platform.BUTTON,
entity_description=MatterButtonEntityDescription(
key="IdentifyButton",
entity_category=EntityCategory.DIAGNOSTIC,
entity_category=EntityCategory.CONFIG,
device_class=ButtonDeviceClass.IDENTIFY,
command=lambda: clusters.Identify.Commands.Identify(identifyTime=15),
),

@@ -87,11 +87,7 @@ CONDITION_CLASSES: dict[str, list[str]] = {
"Averses de neige faible",
"Quelques flocons",
],
ATTR_CONDITION_SNOWY_RAINY: [
"Pluie et neige",
"Pluie verglaçante",
"Averses de pluie et neige",
],
ATTR_CONDITION_SNOWY_RAINY: ["Pluie et neige", "Pluie verglaçante"],
ATTR_CONDITION_SUNNY: ["Ensoleillé"],
ATTR_CONDITION_WINDY: [],
ATTR_CONDITION_WINDY_VARIANT: [],

File diff suppressed because it is too large
@@ -36,8 +36,8 @@ from .const import (
COFFEE_SYSTEM_PROFILE,
DISABLED_TEMP_ENTITIES,
DOMAIN,
PROGRAM_IDS,
PROGRAM_PHASE,
STATE_PROGRAM_ID,
STATE_STATUS_TAGS,
MieleAppliance,
PlatePowerStep,
@@ -979,16 +979,21 @@ class MieleProgramIdSensor(MieleSensor):
@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return (
PROGRAM_IDS[self.device.device_type](self.device.state_program_id).name
if self.device.device_type in PROGRAM_IDS
else None
ret_val = STATE_PROGRAM_ID.get(self.device.device_type, {}).get(
self.device.state_program_id
)
if ret_val is None:
_LOGGER.debug(
"Unknown program id: %s on device type: %s",
self.device.state_program_id,
self.device.device_type,
)
return ret_val

@property
def options(self) -> list[str]:
"""Return the options list for the actual device type."""
return sorted(PROGRAM_IDS.get(self.device.device_type, {}).keys())
return sorted(set(STATE_PROGRAM_ID.get(self.device.device_type, {}).values()))


class MieleTimeSensor(MieleRestorableSensor):

@@ -430,7 +430,7 @@
"custom_program_9": "Custom program 9",
"dark_garments": "Dark garments",
"dark_mixed_grain_bread": "Dark mixed grain bread",
"decrystallise_honey": "Decrystallize honey",
"decrystallise_honey": "Decrystallise honey",
"defrost": "Defrost",
"defrosting_with_microwave": "Defrosting with microwave",
"defrosting_with_steam": "Defrosting with steam",

Some files were not shown because too many files have changed in this diff.