Mirror of https://github.com/home-assistant/core.git (synced 2025-07-29 16:17:20 +00:00)

Commit 988320814f: Merge branch 'dev' into fix-microsign-alt2

.github/workflows/builder.yml (vendored, 8 changed lines)

@@ -94,7 +94,7 @@ jobs:

       - name: Download nightly wheels of frontend
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v9
+        uses: dawidd6/action-download-artifact@v10
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/frontend
@@ -105,7 +105,7 @@ jobs:

       - name: Download nightly wheels of intents
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v9
+        uses: dawidd6/action-download-artifact@v10
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/intents-package
@@ -509,7 +509,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build Docker image
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile
@@ -522,7 +522,7 @@ jobs:
       - name: Push Docker image
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
         id: push
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile

.github/workflows/ci.yaml (vendored, 42 changed lines)

@@ -40,7 +40,7 @@ env:
   CACHE_VERSION: 2
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 1
-  HA_SHORT_VERSION: "2025.6"
+  HA_SHORT_VERSION: "2025.7"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version
@@ -944,7 +944,8 @@ jobs:
             bluez \
             ffmpeg \
             libturbojpeg \
-            libgammu-dev
+            libgammu-dev \
+            libxml2-utils
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
@@ -1020,6 +1021,12 @@ jobs:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
           overwrite: true
+      - name: Beautify test results
+        # For easier identification of parsing errors
+        if: needs.info.outputs.skip_coverage != 'true'
+        run: |
+          xmllint --format "junit.xml" > "junit.xml-tmp"
+          mv "junit.xml-tmp" "junit.xml"
       - name: Upload test results artifact
         if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
         uses: actions/upload-artifact@v4.6.2
@@ -1070,7 +1077,8 @@ jobs:
             bluez \
             ffmpeg \
             libturbojpeg \
-            libmariadb-dev-compat
+            libmariadb-dev-compat \
+            libxml2-utils
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
@@ -1154,6 +1162,12 @@ jobs:
             steps.pytest-partial.outputs.mariadb }}
           path: coverage.xml
           overwrite: true
+      - name: Beautify test results
+        # For easier identification of parsing errors
+        if: needs.info.outputs.skip_coverage != 'true'
+        run: |
+          xmllint --format "junit.xml" > "junit.xml-tmp"
+          mv "junit.xml-tmp" "junit.xml"
       - name: Upload test results artifact
         if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
         uses: actions/upload-artifact@v4.6.2
@@ -1202,7 +1216,8 @@ jobs:
           sudo apt-get -y install \
             bluez \
             ffmpeg \
-            libturbojpeg
+            libturbojpeg \
+            libxml2-utils
           sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y
           sudo apt-get -y install \
             postgresql-server-dev-14
@@ -1290,6 +1305,12 @@ jobs:
             steps.pytest-partial.outputs.postgresql }}
           path: coverage.xml
           overwrite: true
+      - name: Beautify test results
+        # For easier identification of parsing errors
+        if: needs.info.outputs.skip_coverage != 'true'
+        run: |
+          xmllint --format "junit.xml" > "junit.xml-tmp"
+          mv "junit.xml-tmp" "junit.xml"
       - name: Upload test results artifact
         if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
         uses: actions/upload-artifact@v4.6.2
@@ -1320,7 +1341,7 @@ jobs:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.4.2
+        uses: codecov/codecov-action@v5.4.3
         with:
           fail_ci_if_error: true
           flags: full-suite
@@ -1357,7 +1378,8 @@ jobs:
             bluez \
             ffmpeg \
             libturbojpeg \
-            libgammu-dev
+            libgammu-dev \
+            libxml2-utils
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
@@ -1436,6 +1458,12 @@ jobs:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
           overwrite: true
+      - name: Beautify test results
+        # For easier identification of parsing errors
+        if: needs.info.outputs.skip_coverage != 'true'
+        run: |
+          xmllint --format "junit.xml" > "junit.xml-tmp"
+          mv "junit.xml-tmp" "junit.xml"
       - name: Upload test results artifact
         if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
         uses: actions/upload-artifact@v4.6.2
@@ -1463,7 +1491,7 @@ jobs:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.4.2
+        uses: codecov/codecov-action@v5.4.3
         with:
           fail_ci_if_error: true
           token: ${{ secrets.CODECOV_TOKEN }}

.github/workflows/codeql.yml (vendored, 4 changed lines)

@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.28.17
+        uses: github/codeql-action/init@v3.28.18
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.28.17
+        uses: github/codeql-action/analyze@v3.28.18
         with:
           category: "/language:python"

@@ -66,6 +66,7 @@ homeassistant.components.alarm_control_panel.*
 homeassistant.components.alert.*
 homeassistant.components.alexa.*
 homeassistant.components.alpha_vantage.*
+homeassistant.components.amazon_devices.*
 homeassistant.components.amazon_polly.*
 homeassistant.components.amberelectric.*
 homeassistant.components.ambient_network.*
@@ -270,6 +271,7 @@ homeassistant.components.image_processing.*
 homeassistant.components.image_upload.*
 homeassistant.components.imap.*
 homeassistant.components.imgw_pib.*
+homeassistant.components.immich.*
 homeassistant.components.incomfort.*
 homeassistant.components.input_button.*
 homeassistant.components.input_select.*
@@ -385,6 +387,7 @@ homeassistant.components.overseerr.*
 homeassistant.components.p1_monitor.*
 homeassistant.components.pandora.*
 homeassistant.components.panel_custom.*
+homeassistant.components.paperless_ngx.*
 homeassistant.components.peblar.*
 homeassistant.components.peco.*
 homeassistant.components.pegel_online.*

CODEOWNERS (generated, 20 changed lines)

@@ -89,6 +89,8 @@ build.json @home-assistant/supervisor
 /tests/components/alert/ @home-assistant/core @frenck
 /homeassistant/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
 /tests/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
+/homeassistant/components/amazon_devices/ @chemelli74
+/tests/components/amazon_devices/ @chemelli74
 /homeassistant/components/amazon_polly/ @jschlyter
 /homeassistant/components/amberelectric/ @madpilot
 /tests/components/amberelectric/ @madpilot
@@ -202,8 +204,8 @@ build.json @home-assistant/supervisor
 /tests/components/blebox/ @bbx-a @swistakm
 /homeassistant/components/blink/ @fronzbot @mkmer
 /tests/components/blink/ @fronzbot @mkmer
-/homeassistant/components/blue_current/ @Floris272 @gleeuwen
-/tests/components/blue_current/ @Floris272 @gleeuwen
+/homeassistant/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
+/tests/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
 /homeassistant/components/bluemaestro/ @bdraco
 /tests/components/bluemaestro/ @bdraco
 /homeassistant/components/blueprint/ @home-assistant/core
@@ -303,6 +305,7 @@ build.json @home-assistant/supervisor
 /homeassistant/components/crownstone/ @Crownstone @RicArch97
 /tests/components/crownstone/ @Crownstone @RicArch97
 /homeassistant/components/cups/ @fabaff
+/tests/components/cups/ @fabaff
 /homeassistant/components/daikin/ @fredrike
 /tests/components/daikin/ @fredrike
 /homeassistant/components/date/ @home-assistant/core
@@ -710,6 +713,8 @@ build.json @home-assistant/supervisor
 /tests/components/imeon_inverter/ @Imeon-Energy
 /homeassistant/components/imgw_pib/ @bieniu
 /tests/components/imgw_pib/ @bieniu
+/homeassistant/components/immich/ @mib1185
+/tests/components/immich/ @mib1185
 /homeassistant/components/improv_ble/ @emontnemery
 /tests/components/improv_ble/ @emontnemery
 /homeassistant/components/incomfort/ @jbouwh
@@ -1138,6 +1143,8 @@ build.json @home-assistant/supervisor
 /tests/components/palazzetti/ @dotvav
 /homeassistant/components/panel_custom/ @home-assistant/frontend
 /tests/components/panel_custom/ @home-assistant/frontend
+/homeassistant/components/paperless_ngx/ @fvgarrel
+/tests/components/paperless_ngx/ @fvgarrel
 /homeassistant/components/peblar/ @frenck
 /tests/components/peblar/ @frenck
 /homeassistant/components/peco/ @IceBotYT
@@ -1176,6 +1183,8 @@ build.json @home-assistant/supervisor
 /tests/components/powerwall/ @bdraco @jrester @daniel-simpson
 /homeassistant/components/private_ble_device/ @Jc2k
 /tests/components/private_ble_device/ @Jc2k
+/homeassistant/components/probe_plus/ @pantherale0
+/tests/components/probe_plus/ @pantherale0
 /homeassistant/components/profiler/ @bdraco
 /tests/components/profiler/ @bdraco
 /homeassistant/components/progettihwsw/ @ardaseremet
@@ -1222,6 +1231,7 @@ build.json @home-assistant/supervisor
 /homeassistant/components/qnap_qsw/ @Noltari
 /tests/components/qnap_qsw/ @Noltari
 /homeassistant/components/quantum_gateway/ @cisasteelersfan
+/tests/components/quantum_gateway/ @cisasteelersfan
 /homeassistant/components/qvr_pro/ @oblogic7
 /homeassistant/components/qwikswitch/ @kellerza
 /tests/components/qwikswitch/ @kellerza
@@ -1410,6 +1420,8 @@ build.json @home-assistant/supervisor
 /tests/components/sma/ @kellerza @rklomp @erwindouna
 /homeassistant/components/smappee/ @bsmappee
 /tests/components/smappee/ @bsmappee
+/homeassistant/components/smarla/ @explicatis @rlint-explicatis
+/tests/components/smarla/ @explicatis @rlint-explicatis
 /homeassistant/components/smart_meter_texas/ @grahamwetzler
 /tests/components/smart_meter_texas/ @grahamwetzler
 /homeassistant/components/smartthings/ @joostlek
@@ -1539,8 +1551,8 @@ build.json @home-assistant/supervisor
 /tests/components/tedee/ @patrickhilker @zweckj
 /homeassistant/components/tellduslive/ @fredrike
 /tests/components/tellduslive/ @fredrike
-/homeassistant/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
-/tests/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
+/homeassistant/components/template/ @Petro31 @home-assistant/core
+/tests/components/template/ @Petro31 @home-assistant/core
 /homeassistant/components/tesla_fleet/ @Bre77
 /tests/components/tesla_fleet/ @Bre77
 /homeassistant/components/tesla_wall_connector/ @einarhauks

@@ -171,8 +171,6 @@ FRONTEND_INTEGRATIONS = {
 # Stage 0 is divided into substages. Each substage has a name, a set of integrations and a timeout.
 # The substage containing recorder should have no timeout, as it could cancel a database migration.
 # Recorder freezes "recorder" timeout during a migration, but it does not freeze other timeouts.
 # The substages preceding it should also have no timeout, until we ensure that the recorder
 # is not accidentally promoted as a dependency of any of the integrations in them.
 # If we add timeouts to the frontend substages, we should make sure they don't apply in recovery mode.
 STAGE_0_INTEGRATIONS = (
     # Load logging and http deps as soon as possible
@@ -929,7 +927,11 @@ async def _async_set_up_integrations(
             await _async_setup_multi_components(hass, stage_all_domains, config)
             continue
         try:
-            async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
+            async with hass.timeout.async_timeout(
+                timeout,
+                cool_down=COOLDOWN_TIME,
+                cancel_message=f"Bootstrap stage {name} timeout",
+            ):
                 await _async_setup_multi_components(hass, stage_all_domains, config)
         except TimeoutError:
             _LOGGER.warning(
@@ -941,7 +943,11 @@ async def _async_set_up_integrations(
     # Wrap up startup
     _LOGGER.debug("Waiting for startup to wrap up")
     try:
-        async with hass.timeout.async_timeout(WRAP_UP_TIMEOUT, cool_down=COOLDOWN_TIME):
+        async with hass.timeout.async_timeout(
+            WRAP_UP_TIMEOUT,
+            cool_down=COOLDOWN_TIME,
+            cancel_message="Bootstrap startup wrap up timeout",
+        ):
             await hass.async_block_till_done()
     except TimeoutError:
         _LOGGER.warning(
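
The only functional change in the two bootstrap hunks above is that a cancel_message is now passed to the timeout context managers. The sketch below is a standalone illustration with plain asyncio, not Home Assistant's TimeoutManager: the string handed to Task.cancel() travels with the resulting CancelledError, which is what makes a timeout-driven cancellation easy to attribute in logs. The message string is invented for the example.

import asyncio


async def worker() -> None:
    try:
        await asyncio.sleep(60)
    except asyncio.CancelledError as err:
        # The cancel message shows up in the exception's args.
        print("cancelled:", err.args)  # ('Bootstrap stage 1 timeout',)
        raise


async def main() -> None:
    task = asyncio.create_task(worker())
    await asyncio.sleep(0.1)
    task.cancel("Bootstrap stage 1 timeout")  # analogous to cancel_message above
    try:
        await task
    except asyncio.CancelledError:
        pass


asyncio.run(main())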

@@ -3,6 +3,7 @@
   "name": "Amazon",
   "integrations": [
     "alexa",
+    "amazon_devices",
     "amazon_polly",
     "aws",
     "aws_s3",

homeassistant/brands/shelly.json (new file, 6 lines)

{
  "domain": "shelly",
  "name": "shelly",
  "integrations": ["shelly"],
  "iot_standards": ["zwave"]
}

@@ -40,9 +40,10 @@ class AcmedaFlowHandler(ConfigFlow, domain=DOMAIN):
             entry.unique_id for entry in self._async_current_entries()
         }

+        hubs: list[aiopulse.Hub] = []
         with suppress(TimeoutError):
             async with timeout(5):
-                hubs: list[aiopulse.Hub] = [
+                hubs = [
                     hub
                     async for hub in aiopulse.Hub.discover()
                     if hub.id not in already_configured
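
The Acmeda hunk above pre-binds hubs before entering suppress(TimeoutError). If discovery times out before the assignment inside the block runs, the suppressed exception would otherwise leave the name unbound and later code would raise NameError. A minimal standalone sketch of that control flow, not the integration's actual discovery code:

from contextlib import suppress


def discover(timeout_hit: bool) -> list[str]:
    """Mirror the control flow of the config flow (illustration only)."""
    hubs: list[str] = []  # pre-binding the name, as the new code does
    with suppress(TimeoutError):
        if timeout_hit:
            raise TimeoutError  # stands in for the 5 s discovery timeout
        hubs = ["hub-1", "hub-2"]
    return hubs  # without the pre-binding, a timeout would leave `hubs` unbound


print(discover(timeout_hit=True))   # []
print(discover(timeout_hit=False))  # ['hub-1', 'hub-2']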

@@ -15,7 +15,7 @@ from homeassistant.helpers.entity_platform import (
 )

 from . import AgentDVRConfigEntry
-from .const import ATTRIBUTION, CAMERA_SCAN_INTERVAL_SECS, DOMAIN as AGENT_DOMAIN
+from .const import ATTRIBUTION, CAMERA_SCAN_INTERVAL_SECS, DOMAIN

 SCAN_INTERVAL = timedelta(seconds=CAMERA_SCAN_INTERVAL_SECS)

@@ -82,7 +82,7 @@ class AgentCamera(MjpegCamera):
             still_image_url=f"{device.client._server_url}{device.still_image_url}&size={device.mjpegStreamWidth}x{device.mjpegStreamHeight}",  # noqa: SLF001
         )
         self._attr_device_info = DeviceInfo(
-            identifiers={(AGENT_DOMAIN, self.unique_id)},
+            identifiers={(DOMAIN, self.unique_id)},
             manufacturer="Agent",
             model="Camera",
             name=f"{device.client.name} {device.name}",

@@ -5,23 +5,22 @@ from __future__ import annotations
 from datetime import timedelta
 import logging

-from airthings import Airthings, AirthingsDevice, AirthingsError
+from airthings import Airthings

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_ID, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
-from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

-from .const import CONF_SECRET, DOMAIN
+from .const import CONF_SECRET
+from .coordinator import AirthingsDataUpdateCoordinator

 _LOGGER = logging.getLogger(__name__)

 PLATFORMS: list[Platform] = [Platform.SENSOR]
 SCAN_INTERVAL = timedelta(minutes=6)

-type AirthingsDataCoordinatorType = DataUpdateCoordinator[dict[str, AirthingsDevice]]
-type AirthingsConfigEntry = ConfigEntry[AirthingsDataCoordinatorType]
+type AirthingsConfigEntry = ConfigEntry[AirthingsDataUpdateCoordinator]


 async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) -> bool:
@@ -32,21 +31,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) -> bool:
         async_get_clientsession(hass),
     )

-    async def _update_method() -> dict[str, AirthingsDevice]:
-        """Get the latest data from Airthings."""
-        try:
-            return await airthings.update_devices()  # type: ignore[no-any-return]
-        except AirthingsError as err:
-            raise UpdateFailed(f"Unable to fetch data: {err}") from err
+    coordinator = AirthingsDataUpdateCoordinator(hass, airthings)

-    coordinator = DataUpdateCoordinator(
-        hass,
-        _LOGGER,
-        config_entry=entry,
-        name=DOMAIN,
-        update_method=_update_method,
-        update_interval=SCAN_INTERVAL,
-    )
     await coordinator.async_config_entry_first_refresh()

     entry.runtime_data = coordinator

homeassistant/components/airthings/coordinator.py (new file, 36 lines)

"""The Airthings integration."""

from datetime import timedelta
import logging

from airthings import Airthings, AirthingsDevice, AirthingsError

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=6)


class AirthingsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, AirthingsDevice]]):
    """Coordinator for Airthings data updates."""

    def __init__(self, hass: HomeAssistant, airthings: Airthings) -> None:
        """Initialize the coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            name=DOMAIN,
            update_method=self._update_method,
            update_interval=SCAN_INTERVAL,
        )
        self.airthings = airthings

    async def _update_method(self) -> dict[str, AirthingsDevice]:
        """Get the latest data from Airthings."""
        try:
            return await self.airthings.update_devices()  # type: ignore[no-any-return]
        except AirthingsError as err:
            raise UpdateFailed(f"Unable to fetch data: {err}") from err

@@ -19,6 +19,7 @@ from homeassistant.const import (
     SIGNAL_STRENGTH_DECIBELS,
     EntityCategory,
     UnitOfPressure,
+    UnitOfSoundPressure,
     UnitOfTemperature,
 )
 from homeassistant.core import HomeAssistant
@@ -27,8 +28,9 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.typing import StateType
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from . import AirthingsConfigEntry, AirthingsDataCoordinatorType
+from . import AirthingsConfigEntry
 from .const import DOMAIN
+from .coordinator import AirthingsDataUpdateCoordinator

 SENSORS: dict[str, SensorEntityDescription] = {
     "radonShortTermAvg": SensorEntityDescription(
@@ -54,6 +56,12 @@ SENSORS: dict[str, SensorEntityDescription] = {
         native_unit_of_measurement=UnitOfPressure.MBAR,
         state_class=SensorStateClass.MEASUREMENT,
     ),
+    "sla": SensorEntityDescription(
+        key="sla",
+        device_class=SensorDeviceClass.SOUND_PRESSURE,
+        native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A,
+        state_class=SensorStateClass.MEASUREMENT,
+    ),
     "battery": SensorEntityDescription(
         key="battery",
         device_class=SensorDeviceClass.BATTERY,
@@ -140,7 +148,7 @@ async def async_setup_entry(


 class AirthingsHeaterEnergySensor(
-    CoordinatorEntity[AirthingsDataCoordinatorType], SensorEntity
+    CoordinatorEntity[AirthingsDataUpdateCoordinator], SensorEntity
 ):
     """Representation of a Airthings Sensor device."""

@@ -149,7 +157,7 @@ class AirthingsHeaterEnergySensor(

     def __init__(
         self,
-        coordinator: AirthingsDataCoordinatorType,
+        coordinator: AirthingsDataUpdateCoordinator,
         airthings_device: AirthingsDevice,
         entity_description: SensorEntityDescription,
     ) -> None:

@@ -142,7 +142,7 @@ class AirtouchAC(CoordinatorEntity, ClimateEntity):
         return AT_TO_HA_STATE[self._airtouch.acs[self._ac_number].AcMode]

     @property
-    def hvac_modes(self):
+    def hvac_modes(self) -> list[HVACMode]:
         """Return the list of available operation modes."""
         airtouch_modes = self._airtouch.GetSupportedCoolingModesForAc(self._ac_number)
         modes = [AT_TO_HA_STATE[mode] for mode in airtouch_modes]
@@ -226,12 +226,12 @@ class AirtouchGroup(CoordinatorEntity, ClimateEntity):
         return super()._handle_coordinator_update()

     @property
-    def min_temp(self):
+    def min_temp(self) -> float:
         """Return Minimum Temperature for AC of this group."""
         return self._airtouch.acs[self._unit.BelongsToAc].MinSetpoint

     @property
-    def max_temp(self):
+    def max_temp(self) -> float:
         """Return Max Temperature for AC of this group."""
         return self._airtouch.acs[self._unit.BelongsToAc].MaxSetpoint


homeassistant/components/amazon_devices/__init__.py (new file, 32 lines)

"""Amazon Devices integration."""

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .coordinator import AmazonConfigEntry, AmazonDevicesCoordinator

PLATFORMS = [
    Platform.BINARY_SENSOR,
    Platform.NOTIFY,
    Platform.SWITCH,
]


async def async_setup_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
    """Set up Amazon Devices platform."""

    coordinator = AmazonDevicesCoordinator(hass, entry)

    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
    """Unload a config entry."""
    await entry.runtime_data.api.close()
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

homeassistant/components/amazon_devices/binary_sensor.py (new file, 71 lines)

"""Support for binary sensors."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from typing import Final

from aioamazondevices.api import AmazonDevice

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):
    """Amazon Devices binary sensor entity description."""

    is_on_fn: Callable[[AmazonDevice], bool]


BINARY_SENSORS: Final = (
    AmazonBinarySensorEntityDescription(
        key="online",
        device_class=BinarySensorDeviceClass.CONNECTIVITY,
        is_on_fn=lambda _device: _device.online,
    ),
    AmazonBinarySensorEntityDescription(
        key="bluetooth",
        translation_key="bluetooth",
        is_on_fn=lambda _device: _device.bluetooth_state,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AmazonConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Amazon Devices binary sensors based on a config entry."""

    coordinator = entry.runtime_data

    async_add_entities(
        AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
        for sensor_desc in BINARY_SENSORS
        for serial_num in coordinator.data
    )


class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
    """Binary sensor device."""

    entity_description: AmazonBinarySensorEntityDescription

    @property
    def is_on(self) -> bool:
        """Return True if the binary sensor is on."""
        return self.entity_description.is_on_fn(self.device)

homeassistant/components/amazon_devices/config_flow.py (new file, 63 lines)

"""Config flow for Amazon Devices integration."""

from __future__ import annotations

from typing import Any

from aioamazondevices.api import AmazonEchoApi
from aioamazondevices.exceptions import CannotAuthenticate, CannotConnect
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_CODE, CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.selector import CountrySelector

from .const import CONF_LOGIN_DATA, DOMAIN


class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Amazon Devices."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors = {}
        if user_input:
            client = AmazonEchoApi(
                user_input[CONF_COUNTRY],
                user_input[CONF_USERNAME],
                user_input[CONF_PASSWORD],
            )
            try:
                data = await client.login_mode_interactive(user_input[CONF_CODE])
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except CannotAuthenticate:
                errors["base"] = "invalid_auth"
            else:
                await self.async_set_unique_id(data["customer_info"]["user_id"])
                self._abort_if_unique_id_configured()
                user_input.pop(CONF_CODE)
                return self.async_create_entry(
                    title=user_input[CONF_USERNAME],
                    data=user_input | {CONF_LOGIN_DATA: data},
                )
            finally:
                await client.close()

        return self.async_show_form(
            step_id="user",
            errors=errors,
            data_schema=vol.Schema(
                {
                    vol.Required(
                        CONF_COUNTRY, default=self.hass.config.country
                    ): CountrySelector(),
                    vol.Required(CONF_USERNAME): cv.string,
                    vol.Required(CONF_PASSWORD): cv.string,
                    vol.Required(CONF_CODE): cv.string,
                }
            ),
        )

homeassistant/components/amazon_devices/const.py (new file, 8 lines)

"""Amazon Devices constants."""

import logging

_LOGGER = logging.getLogger(__package__)

DOMAIN = "amazon_devices"
CONF_LOGIN_DATA = "login_data"

homeassistant/components/amazon_devices/coordinator.py (new file, 58 lines)

"""Support for Amazon Devices."""

from datetime import timedelta

from aioamazondevices.api import AmazonDevice, AmazonEchoApi
from aioamazondevices.exceptions import (
    CannotAuthenticate,
    CannotConnect,
    CannotRetrieveData,
)

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import _LOGGER, CONF_LOGIN_DATA

SCAN_INTERVAL = 30

type AmazonConfigEntry = ConfigEntry[AmazonDevicesCoordinator]


class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
    """Base coordinator for Amazon Devices."""

    config_entry: AmazonConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        entry: AmazonConfigEntry,
    ) -> None:
        """Initialize the scanner."""
        super().__init__(
            hass,
            _LOGGER,
            name=entry.title,
            config_entry=entry,
            update_interval=timedelta(seconds=SCAN_INTERVAL),
        )
        self.api = AmazonEchoApi(
            entry.data[CONF_COUNTRY],
            entry.data[CONF_USERNAME],
            entry.data[CONF_PASSWORD],
            entry.data[CONF_LOGIN_DATA],
        )

    async def _async_update_data(self) -> dict[str, AmazonDevice]:
        """Update device data."""
        try:
            await self.api.login_mode_stored_data()
            return await self.api.get_devices_data()
        except (CannotConnect, CannotRetrieveData) as err:
            raise UpdateFailed(f"Error occurred while updating {self.name}") from err
        except CannotAuthenticate as err:
            raise ConfigEntryError("Could not authenticate") from err

homeassistant/components/amazon_devices/diagnostics.py (new file, 66 lines)

"""Diagnostics support for Amazon Devices integration."""

from __future__ import annotations

from typing import Any

from aioamazondevices.api import AmazonDevice

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntry

from .coordinator import AmazonConfigEntry

TO_REDACT = {CONF_PASSWORD, CONF_USERNAME, CONF_NAME, "title"}


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: AmazonConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""

    coordinator = entry.runtime_data

    devices: list[dict[str, dict[str, Any]]] = [
        build_device_data(device) for device in coordinator.data.values()
    ]

    return {
        "entry": async_redact_data(entry.as_dict(), TO_REDACT),
        "device_info": {
            "last_update success": coordinator.last_update_success,
            "last_exception": repr(coordinator.last_exception),
            "devices": devices,
        },
    }


async def async_get_device_diagnostics(
    hass: HomeAssistant, entry: AmazonConfigEntry, device_entry: DeviceEntry
) -> dict[str, Any]:
    """Return diagnostics for a device."""

    coordinator = entry.runtime_data

    assert device_entry.serial_number

    return build_device_data(coordinator.data[device_entry.serial_number])


def build_device_data(device: AmazonDevice) -> dict[str, Any]:
    """Build device data for diagnostics."""
    return {
        "account name": device.account_name,
        "capabilities": device.capabilities,
        "device family": device.device_family,
        "device type": device.device_type,
        "device cluster members": device.device_cluster_members,
        "online": device.online,
        "serial number": device.serial_number,
        "software version": device.software_version,
        "do not disturb": device.do_not_disturb,
        "response style": device.response_style,
        "bluetooth state": device.bluetooth_state,
    }

homeassistant/components/amazon_devices/entity.py (new file, 57 lines)

"""Defines a base Amazon Devices entity."""

from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SPEAKER_GROUP_MODEL

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import AmazonDevicesCoordinator


class AmazonEntity(CoordinatorEntity[AmazonDevicesCoordinator]):
    """Defines a base Amazon Devices entity."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: AmazonDevicesCoordinator,
        serial_num: str,
        description: EntityDescription,
    ) -> None:
        """Initialize the entity."""
        super().__init__(coordinator)
        self._serial_num = serial_num
        model_details = coordinator.api.get_model_details(self.device)
        model = model_details["model"] if model_details else None
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, serial_num)},
            name=self.device.account_name,
            model=model,
            model_id=self.device.device_type,
            manufacturer="Amazon",
            hw_version=model_details["hw_version"] if model_details else None,
            sw_version=(
                self.device.software_version if model != SPEAKER_GROUP_MODEL else None
            ),
            serial_number=serial_num if model != SPEAKER_GROUP_MODEL else None,
        )
        self.entity_description = description
        self._attr_unique_id = f"{serial_num}-{description.key}"

    @property
    def device(self) -> AmazonDevice:
        """Return the device."""
        return self.coordinator.data[self._serial_num]

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return (
            super().available
            and self._serial_num in self.coordinator.data
            and self.device.online
        )

homeassistant/components/amazon_devices/icons.json (new file, 12 lines)

{
  "entity": {
    "binary_sensor": {
      "bluetooth": {
        "default": "mdi:bluetooth",
        "state": {
          "off": "mdi:bluetooth-off"
        }
      }
    }
  }
}

homeassistant/components/amazon_devices/manifest.json (new file, 122 lines)

{
  "domain": "amazon_devices",
  "name": "Amazon Devices",
  "codeowners": ["@chemelli74"],
  "config_flow": true,
  "dhcp": [
    { "macaddress": "007147*" },
    { "macaddress": "00FC8B*" },
    { "macaddress": "0812A5*" },
    { "macaddress": "086AE5*" },
    { "macaddress": "08849D*" },
    { "macaddress": "089115*" },
    { "macaddress": "08A6BC*" },
    { "macaddress": "08C224*" },
    { "macaddress": "0CDC91*" },
    { "macaddress": "0CEE99*" },
    { "macaddress": "1009F9*" },
    { "macaddress": "109693*" },
    { "macaddress": "10BF67*" },
    { "macaddress": "10CE02*" },
    { "macaddress": "140AC5*" },
    { "macaddress": "149138*" },
    { "macaddress": "1848BE*" },
    { "macaddress": "1C12B0*" },
    { "macaddress": "1C4D66*" },
    { "macaddress": "1C93C4*" },
    { "macaddress": "1CFE2B*" },
    { "macaddress": "244CE3*" },
    { "macaddress": "24CE33*" },
    { "macaddress": "2873F6*" },
    { "macaddress": "2C71FF*" },
    { "macaddress": "34AFB3*" },
    { "macaddress": "34D270*" },
    { "macaddress": "38F73D*" },
    { "macaddress": "3C5CC4*" },
    { "macaddress": "3CE441*" },
    { "macaddress": "440049*" },
    { "macaddress": "40A2DB*" },
    { "macaddress": "40A9CF*" },
    { "macaddress": "40B4CD*" },
    { "macaddress": "443D54*" },
    { "macaddress": "44650D*" },
    { "macaddress": "485F2D*" },
    { "macaddress": "48785E*" },
    { "macaddress": "48B423*" },
    { "macaddress": "4C1744*" },
    { "macaddress": "4CEFC0*" },
    { "macaddress": "5007C3*" },
    { "macaddress": "50D45C*" },
    { "macaddress": "50DCE7*" },
    { "macaddress": "50F5DA*" },
    { "macaddress": "5C415A*" },
    { "macaddress": "6837E9*" },
    { "macaddress": "6854FD*" },
    { "macaddress": "689A87*" },
    { "macaddress": "68B691*" },
    { "macaddress": "68DBF5*" },
    { "macaddress": "68F63B*" },
    { "macaddress": "6C0C9A*" },
    { "macaddress": "6C5697*" },
    { "macaddress": "7458F3*" },
    { "macaddress": "74C246*" },
    { "macaddress": "74D637*" },
    { "macaddress": "74E20C*" },
    { "macaddress": "74ECB2*" },
    { "macaddress": "786C84*" },
    { "macaddress": "78A03F*" },
    { "macaddress": "7C6166*" },
    { "macaddress": "7C6305*" },
    { "macaddress": "7CD566*" },
    { "macaddress": "8871E5*" },
    { "macaddress": "901195*" },
    { "macaddress": "90235B*" },
    { "macaddress": "90A822*" },
    { "macaddress": "90F82E*" },
    { "macaddress": "943A91*" },
    { "macaddress": "98226E*" },
    { "macaddress": "98CCF3*" },
    { "macaddress": "9CC8E9*" },
    { "macaddress": "A002DC*" },
    { "macaddress": "A0D2B1*" },
    { "macaddress": "A40801*" },
    { "macaddress": "A8E621*" },
    { "macaddress": "AC416A*" },
    { "macaddress": "AC63BE*" },
    { "macaddress": "ACCCFC*" },
    { "macaddress": "B0739C*" },
    { "macaddress": "B0CFCB*" },
    { "macaddress": "B0F7C4*" },
    { "macaddress": "B85F98*" },
    { "macaddress": "C091B9*" },
    { "macaddress": "C095CF*" },
    { "macaddress": "C49500*" },
    { "macaddress": "C86C3D*" },
    { "macaddress": "CC9EA2*" },
    { "macaddress": "CCF735*" },
    { "macaddress": "DC54D7*" },
    { "macaddress": "D8BE65*" },
    { "macaddress": "D8FBD6*" },
    { "macaddress": "DC91BF*" },
    { "macaddress": "DCA0D0*" },
    { "macaddress": "E0F728*" },
    { "macaddress": "EC2BEB*" },
    { "macaddress": "EC8AC4*" },
    { "macaddress": "ECA138*" },
    { "macaddress": "F02F9E*" },
    { "macaddress": "F0272D*" },
    { "macaddress": "F0F0A4*" },
    { "macaddress": "F4032A*" },
    { "macaddress": "F854B8*" },
    { "macaddress": "FC492D*" },
    { "macaddress": "FC65DE*" },
    { "macaddress": "FCA183*" },
    { "macaddress": "FCE9D8*" }
  ],
  "documentation": "https://www.home-assistant.io/integrations/amazon_devices",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["aioamazondevices"],
  "quality_scale": "bronze",
  "requirements": ["aioamazondevices==3.0.4"]
}

homeassistant/components/amazon_devices/notify.py (new file, 74 lines)

"""Support for notification entity."""

from __future__ import annotations

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any, Final

from aioamazondevices.api import AmazonDevice, AmazonEchoApi

from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity

PARALLEL_UPDATES = 1


@dataclass(frozen=True, kw_only=True)
class AmazonNotifyEntityDescription(NotifyEntityDescription):
    """Amazon Devices notify entity description."""

    method: Callable[[AmazonEchoApi, AmazonDevice, str], Awaitable[None]]
    subkey: str


NOTIFY: Final = (
    AmazonNotifyEntityDescription(
        key="speak",
        translation_key="speak",
        subkey="AUDIO_PLAYER",
        method=lambda api, device, message: api.call_alexa_speak(device, message),
    ),
    AmazonNotifyEntityDescription(
        key="announce",
        translation_key="announce",
        subkey="AUDIO_PLAYER",
        method=lambda api, device, message: api.call_alexa_announcement(
            device, message
        ),
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AmazonConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Amazon Devices notification entity based on a config entry."""

    coordinator = entry.runtime_data

    async_add_entities(
        AmazonNotifyEntity(coordinator, serial_num, sensor_desc)
        for sensor_desc in NOTIFY
        for serial_num in coordinator.data
        if sensor_desc.subkey in coordinator.data[serial_num].capabilities
    )


class AmazonNotifyEntity(AmazonEntity, NotifyEntity):
    """Binary sensor notify platform."""

    entity_description: AmazonNotifyEntityDescription

    async def async_send_message(
        self, message: str, title: str | None = None, **kwargs: Any
    ) -> None:
        """Send a message."""

        await self.entity_description.method(self.coordinator.api, self.device, message)

homeassistant/components/amazon_devices/quality_scale.yaml (new file, 74 lines)

rules:
  # Bronze
  action-setup:
    status: exempt
    comment: no actions
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: no actions
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: entities do not explicitly subscribe to events
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions: todo
  config-entry-unloading: done
  docs-configuration-parameters: todo
  docs-installation-parameters: todo
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: done
  reauthentication-flow: todo
  test-coverage:
    status: todo
    comment: all tests missing

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: Network information not relevant
  discovery: done
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices: todo
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default: done
  entity-translations: done
  exception-translations: todo
  icon-translations: done
  reconfiguration-flow: todo
  repair-issues:
    status: exempt
    comment: no known use cases for repair issues or flows, yet
  stale-devices:
    status: todo
    comment: automate the cleanup process

  # Platinum
  async-dependency: done
  inject-websession: todo
  strict-typing: done

homeassistant/components/amazon_devices/strings.json (new file, 60 lines)

{
  "common": {
    "data_country": "Country code",
    "data_code": "One-time password (OTP code)",
    "data_description_country": "The country of your Amazon account.",
    "data_description_username": "The email address of your Amazon account.",
    "data_description_password": "The password of your Amazon account.",
    "data_description_code": "The one-time password to log in to your account. Currently, only tokens from OTP applications are supported."
  },
  "config": {
    "flow_title": "{username}",
    "step": {
      "user": {
        "data": {
          "country": "[%key:component::amazon_devices::common::data_country%]",
          "username": "[%key:common::config_flow::data::username%]",
          "password": "[%key:common::config_flow::data::password%]",
          "code": "[%key:component::amazon_devices::common::data_description_code%]"
        },
        "data_description": {
          "country": "[%key:component::amazon_devices::common::data_description_country%]",
          "username": "[%key:component::amazon_devices::common::data_description_username%]",
          "password": "[%key:component::amazon_devices::common::data_description_password%]",
          "code": "[%key:component::amazon_devices::common::data_description_code%]"
        }
      }
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    }
  },
  "entity": {
    "binary_sensor": {
      "bluetooth": {
        "name": "Bluetooth"
      }
    },
    "notify": {
      "speak": {
        "name": "Speak"
      },
      "announce": {
        "name": "Announce"
      }
    },
    "switch": {
      "do_not_disturb": {
        "name": "Do not disturb"
      }
    }
  }
}

homeassistant/components/amazon_devices/switch.py (new file, 84 lines)

"""Support for switches."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Final

from aioamazondevices.api import AmazonDevice

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity

PARALLEL_UPDATES = 1


@dataclass(frozen=True, kw_only=True)
class AmazonSwitchEntityDescription(SwitchEntityDescription):
    """Amazon Devices switch entity description."""

    is_on_fn: Callable[[AmazonDevice], bool]
    subkey: str
    method: str


SWITCHES: Final = (
    AmazonSwitchEntityDescription(
        key="do_not_disturb",
        subkey="AUDIO_PLAYER",
        translation_key="do_not_disturb",
        is_on_fn=lambda _device: _device.do_not_disturb,
        method="set_do_not_disturb",
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AmazonConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Amazon Devices switches based on a config entry."""

    coordinator = entry.runtime_data

    async_add_entities(
        AmazonSwitchEntity(coordinator, serial_num, switch_desc)
        for switch_desc in SWITCHES
        for serial_num in coordinator.data
        if switch_desc.subkey in coordinator.data[serial_num].capabilities
    )


class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
    """Switch device."""

    entity_description: AmazonSwitchEntityDescription

    async def _switch_set_state(self, state: bool) -> None:
        """Set desired switch state."""
        method = getattr(self.coordinator.api, self.entity_description.method)

        if TYPE_CHECKING:
            assert method is not None

        await method(self.device, state)
        await self.coordinator.async_request_refresh()

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the switch on."""
        await self._switch_set_state(True)

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the switch off."""
        await self._switch_set_state(False)

    @property
    def is_on(self) -> bool:
        """Return True if switch is on."""
        return self.entity_description.is_on_fn(self.device)
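
In the switch platform above, the entity description stores the API call as a string (method="set_do_not_disturb") and _switch_set_state resolves it with getattr on the coordinator's API object. A standalone sketch of that dispatch pattern; the Api class here is a stand-in, not the aioamazondevices client:

import asyncio


class Api:
    async def set_do_not_disturb(self, device: str, state: bool) -> None:
        print(f"{device}: do_not_disturb={state}")


async def switch_set_state(api: Api, method_name: str, device: str, state: bool) -> None:
    # Resolve the coroutine function by name, mirroring
    # getattr(self.coordinator.api, self.entity_description.method).
    method = getattr(api, method_name)
    await method(device, state)


asyncio.run(switch_set_state(Api(), "set_do_not_disturb", "echo-kitchen", True))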

@@ -24,7 +24,7 @@ from homeassistant.components.recorder import (
     get_instance as get_recorder_instance,
 )
 from homeassistant.config_entries import SOURCE_IGNORE
-from homeassistant.const import ATTR_DOMAIN, __version__ as HA_VERSION
+from homeassistant.const import ATTR_DOMAIN, BASE_PLATFORMS, __version__ as HA_VERSION
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import entity_registry as er
@@ -225,7 +225,8 @@ class Analytics:
                 LOGGER.error(err)
                 return

-            configuration_set = set(yaml_configuration)
+            configuration_set = _domains_from_yaml_config(yaml_configuration)

             er_platforms = {
                 entity.platform
                 for entity in ent_reg.entities.values()
@@ -370,3 +371,13 @@ class Analytics:
         for entry in entries
         if entry.source != SOURCE_IGNORE and entry.disabled_by is None
     )


+def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
+    """Extract domains from the YAML configuration."""
+    domains = set(yaml_configuration)
+    for platforms in conf_util.extract_platform_integrations(
+        yaml_configuration, BASE_PLATFORMS
+    ).values():
+        domains.update(platforms)
+    return domains
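
With the new _domains_from_yaml_config helper, analytics now counts integrations configured as platforms under a base domain (for example sensor with platform entries) and not only top-level YAML keys. The snippet below mirrors that behaviour in isolation; it is an illustration, not the real conf_util.extract_platform_integrations, and the BASE_PLATFORMS subset is assumed for the example.

# Standalone illustration of what the new helper reports.
BASE_PLATFORMS = {"sensor", "switch", "light"}  # subset, for the example

yaml_configuration = {
    "automation": [],
    "sensor": [{"platform": "rest"}, {"platform": "template"}],
}

domains = set(yaml_configuration)  # {'automation', 'sensor'} -- the old behaviour
for key, entries in yaml_configuration.items():
    if key in BASE_PLATFORMS and isinstance(entries, list):
        domains.update(
            entry["platform"]
            for entry in entries
            if isinstance(entry, dict) and "platform" in entry
        )

print(sorted(domains))  # ['automation', 'rest', 'sensor', 'template']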

@@ -7,6 +7,6 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["androidtvremote2"],
-  "requirements": ["androidtvremote2==0.2.1"],
+  "requirements": ["androidtvremote2==0.2.2"],
   "zeroconf": ["_androidtvremote2._tcp.local."]
 }

@@ -51,6 +51,10 @@
         "app_id": "Application ID",
         "app_icon": "Application icon",
         "app_delete": "Check to delete this application"
+      },
+      "data_description": {
+        "app_id": "E.g. com.plexapp.android for https://play.google.com/store/apps/details?id=com.plexapp.android",
+        "app_icon": "Image URL. From the Play Store app page, right click on the icon and select 'Copy image address' and then paste it here. Alternatively, download the image, upload it under /config/www/ and use the URL /local/filename"
       }
     }
   }

@@ -17,4 +17,11 @@ CONF_THINKING_BUDGET = "thinking_budget"
 RECOMMENDED_THINKING_BUDGET = 0
 MIN_THINKING_BUDGET = 1024

-THINKING_MODELS = ["claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest"]
+THINKING_MODELS = [
+    "claude-3-7-sonnet-20250219",
+    "claude-3-7-sonnet-latest",
+    "claude-opus-4-20250514",
+    "claude-opus-4-0",
+    "claude-sonnet-4-20250514",
+    "claude-sonnet-4-0",
+]

@@ -294,6 +294,8 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
         elif isinstance(response, RawMessageDeltaEvent):
             if (usage := response.usage) is not None:
                 chat_log.async_trace(_create_token_stats(input_usage, usage))
+            if response.delta.stop_reason == "refusal":
+                raise HomeAssistantError("Potential policy violation detected")
         elif isinstance(response, RawMessageStopEvent):
             if current_message is not None:
                 messages.append(current_message)
@@ -326,6 +328,7 @@ class AnthropicConversationEntity(

     _attr_has_entity_name = True
     _attr_name = None
+    _attr_supports_streaming = True

     def __init__(self, entry: AnthropicConfigEntry) -> None:
         """Initialize the agent."""

@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/anthropic",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["anthropic==0.47.2"]
+  "requirements": ["anthropic==0.52.0"]
 }

@@ -46,11 +46,7 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
             return self.async_show_form(step_id="user", data_schema=_SCHEMA)

         host, port = user_input[CONF_HOST], user_input[CONF_PORT]
-
-        # Abort if an entry with same host and port is present.
-        self._async_abort_entries_match({CONF_HOST: host, CONF_PORT: port})
-
         # Test the connection to the host and get the current status for serial number.
         try:
             async with asyncio.timeout(CONNECTION_TIMEOUT):
                 data = APCUPSdData(await aioapcaccess.request_status(host, port))
@@ -67,3 +63,30 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):

         title = data.name or data.model or data.serial_no or "APC UPS"
         return self.async_create_entry(title=title, data=user_input)
+
+    async def async_step_reconfigure(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle reconfiguration of an existing entry."""
+
+        if user_input is None:
+            return self.async_show_form(step_id="reconfigure", data_schema=_SCHEMA)
+
+        host, port = user_input[CONF_HOST], user_input[CONF_PORT]
+        self._async_abort_entries_match({CONF_HOST: host, CONF_PORT: port})
+        try:
+            async with asyncio.timeout(CONNECTION_TIMEOUT):
+                data = APCUPSdData(await aioapcaccess.request_status(host, port))
+        except (OSError, asyncio.IncompleteReadError, TimeoutError):
+            errors = {"base": "cannot_connect"}
+            return self.async_show_form(
+                step_id="reconfigure", data_schema=_SCHEMA, errors=errors
+            )
+
+        await self.async_set_unique_id(data.serial_no)
+        self._abort_if_unique_id_mismatch(reason="wrong_apcupsd_daemon")
+
+        return self.async_update_reload_and_abort(
+            self._get_reconfigure_entry(),
+            data_updates=user_input,
+        )
@ -1,7 +1,9 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"wrong_apcupsd_daemon": "The reconfigured APC UPS Daemon is not the same as the one already configured.",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
|
||||
|
@ -62,6 +62,8 @@ async def async_setup_entry(
target_humidity_key=Attribute.HUMIDIFICATION_SETPOINT,
min_humidity=10,
max_humidity=50,
auto_status_key=Attribute.HUMIDIFICATION_AVAILABLE,
auto_status_value=1,
default_humidity=30,
set_humidity_fn=coordinator.client.set_humidification_setpoint,
)
@ -77,6 +79,8 @@ async def async_setup_entry(
action_map=DEHUMIDIFIER_ACTION_MAP,
current_humidity_key=Attribute.INDOOR_HUMIDITY_CONTROLLING_SENSOR_VALUE,
target_humidity_key=Attribute.DEHUMIDIFICATION_SETPOINT,
auto_status_key=None,
auto_status_value=None,
min_humidity=40,
max_humidity=90,
default_humidity=60,
@ -100,6 +104,8 @@ class AprilaireHumidifierDescription(HumidifierEntityDescription):
target_humidity_key: str
min_humidity: int
max_humidity: int
auto_status_key: str | None
auto_status_value: int | None
default_humidity: int
set_humidity_fn: Callable[[int], Awaitable]

@ -163,14 +169,31 @@ class AprilaireHumidifierEntity(BaseAprilaireEntity, HumidifierEntity):
def min_humidity(self) -> float:
"""Return the minimum humidity."""

if self.is_auto_humidity_mode():
return 1

return self.entity_description.min_humidity

@property
def max_humidity(self) -> float:
"""Return the maximum humidity."""

if self.is_auto_humidity_mode():
return 7

return self.entity_description.max_humidity

def is_auto_humidity_mode(self) -> bool:
"""Return whether the humidifier is in auto mode."""

if self.entity_description.auto_status_key is None:
return False

return (
self.coordinator.data.get(self.entity_description.auto_status_key)
== self.entity_description.auto_status_value
)

async def async_set_humidity(self, humidity: int) -> None:
"""Set the humidity."""

@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["pyaprilaire"],
"requirements": ["pyaprilaire==0.8.1"]
"requirements": ["pyaprilaire==0.9.1"]
}
@ -1,6 +1,9 @@
{
"entity": {
"sensor": {
"last_update": {
"default": "mdi:update"
},
"salt_left_side_percentage": {
"default": "mdi:basket-fill"
},
@ -4,6 +4,7 @@ from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime

from aioaquacell import Softener

@ -28,7 +29,7 @@ PARALLEL_UPDATES = 1
class SoftenerSensorEntityDescription(SensorEntityDescription):
"""Describes Softener sensor entity."""

value_fn: Callable[[Softener], StateType]
value_fn: Callable[[Softener], StateType | datetime]


SENSORS: tuple[SoftenerSensorEntityDescription, ...] = (
@ -77,6 +78,12 @@ SENSORS: tuple[SoftenerSensorEntityDescription, ...] = (
"low",
],
),
SoftenerSensorEntityDescription(
key="last_update",
translation_key="last_update",
device_class=SensorDeviceClass.TIMESTAMP,
value_fn=lambda softener: softener.lastUpdate,
),
)


@ -111,6 +118,6 @@ class SoftenerSensor(AquacellEntity, SensorEntity):
self.entity_description = description

@property
def native_value(self) -> StateType:
def native_value(self) -> StateType | datetime:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.softener)
@ -21,6 +21,9 @@
},
"entity": {
"sensor": {
"last_update": {
"name": "Last update"
},
"salt_left_side_percentage": {
"name": "Salt left side percentage"
},
@ -20,9 +20,6 @@ import hass_nabucasa
import voluptuous as vol

from homeassistant.components import conversation, stt, tts, wake_word, websocket_api
from homeassistant.components.tts import (
generate_media_source_id as tts_generate_media_source_id,
)
from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
@ -92,6 +89,8 @@ KEY_ASSIST_PIPELINE: HassKey[PipelineData] = HassKey(DOMAIN)
KEY_PIPELINE_CONVERSATION_DATA: HassKey[dict[str, PipelineConversationData]] = HassKey(
"pipeline_conversation_data"
)
# Number of response parts to handle before streaming the response
STREAM_RESPONSE_CHARS = 60


def validate_language(data: dict[str, Any]) -> Any:
@ -555,7 +554,7 @@ class PipelineRun:
event_callback: PipelineEventCallback
language: str = None # type: ignore[assignment]
runner_data: Any | None = None
intent_agent: str | None = None
intent_agent: conversation.AgentInfo | None = None
tts_audio_output: str | dict[str, Any] | None = None
wake_word_settings: WakeWordSettings | None = None
audio_settings: AudioSettings = field(default_factory=AudioSettings)
@ -591,6 +590,9 @@ class PipelineRun:
_intent_agent_only = False
"""If request should only be handled by agent, ignoring sentence triggers and local processing."""

_streamed_response_text = False
"""If the conversation agent streamed response text to TTS result."""

def __post_init__(self) -> None:
"""Set language for pipeline."""
self.language = self.pipeline.language or self.hass.config.language
@ -652,6 +654,11 @@ class PipelineRun:
"token": self.tts_stream.token,
"url": self.tts_stream.url,
"mime_type": self.tts_stream.content_type,
"stream_response": (
self.tts_stream.supports_streaming_input
and self.intent_agent
and self.intent_agent.supports_streaming
),
}

self.process_event(PipelineEvent(PipelineEventType.RUN_START, data))
@ -899,12 +906,12 @@ class PipelineRun:
) -> str:
"""Run speech-to-text portion of pipeline. Returns the spoken text."""
# Create a background task to prepare the conversation agent
if self.end_stage >= PipelineStage.INTENT:
if self.end_stage >= PipelineStage.INTENT and self.intent_agent:
self.hass.async_create_background_task(
conversation.async_prepare_agent(
self.hass, self.intent_agent, self.language
self.hass, self.intent_agent.id, self.language
),
f"prepare conversation agent {self.intent_agent}",
f"prepare conversation agent {self.intent_agent.id}",
)

if isinstance(self.stt_provider, stt.Provider):
@ -1045,7 +1052,7 @@ class PipelineRun:
message=f"Intent recognition engine {engine} is not found",
)

self.intent_agent = agent_info.id
self.intent_agent = agent_info

async def recognize_intent(
self,
@ -1078,7 +1085,7 @@ class PipelineRun:
PipelineEvent(
PipelineEventType.INTENT_START,
{
"engine": self.intent_agent,
"engine": self.intent_agent.id,
"language": input_language,
"intent_input": intent_input,
"conversation_id": conversation_id,
@ -1095,11 +1102,11 @@ class PipelineRun:
conversation_id=conversation_id,
device_id=device_id,
language=input_language,
agent_id=self.intent_agent,
agent_id=self.intent_agent.id,
extra_system_prompt=conversation_extra_system_prompt,
)

agent_id = self.intent_agent
agent_id = self.intent_agent.id
processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
intent_response: intent.IntentResponse | None = None
if not processed_locally and not self._intent_agent_only:
@ -1121,7 +1128,7 @@ class PipelineRun:
# If the LLM has API access, we filter out some sentences that are
# interfering with LLM operation.
if (
intent_agent_state := self.hass.states.get(self.intent_agent)
intent_agent_state := self.hass.states.get(self.intent_agent.id)
) and intent_agent_state.attributes.get(
ATTR_SUPPORTED_FEATURES, 0
) & conversation.ConversationEntityFeature.CONTROL:
@ -1143,6 +1150,13 @@ class PipelineRun:
agent_id = conversation.HOME_ASSISTANT_AGENT
processed_locally = True

if self.tts_stream and self.tts_stream.supports_streaming_input:
tts_input_stream: asyncio.Queue[str | None] | None = asyncio.Queue()
else:
tts_input_stream = None
chat_log_role = None
delta_character_count = 0

@callback
def chat_log_delta_listener(
chat_log: conversation.ChatLog, delta: dict
@ -1156,6 +1170,61 @@ class PipelineRun:
},
)
)
if tts_input_stream is None:
return

nonlocal chat_log_role

if role := delta.get("role"):
chat_log_role = role

# We are only interested in assistant deltas
if chat_log_role != "assistant":
return

if content := delta.get("content"):
tts_input_stream.put_nowait(content)

if self._streamed_response_text:
return

nonlocal delta_character_count

# Streamed responses are not cached. That's why we only start streaming text after
# we have received enough characters that indicates it will be a long response
# or if we have received text, and then a tool call.

# Tool call after we already received text
start_streaming = delta_character_count > 0 and delta.get("tool_calls")

# Count characters in the content and test if we exceed streaming threshold
if not start_streaming and content:
delta_character_count += len(content)
start_streaming = delta_character_count > STREAM_RESPONSE_CHARS

if not start_streaming:
return

self._streamed_response_text = True

async def tts_input_stream_generator() -> AsyncGenerator[str]:
"""Yield TTS input stream."""
while (tts_input := await tts_input_stream.get()) is not None:
yield tts_input

# Concatenate all existing queue items
parts = []
while not tts_input_stream.empty():
parts.append(tts_input_stream.get_nowait())
tts_input_stream.put_nowait(
"".join(
# At this point parts is only strings, None indicates end of queue
cast(list[str], parts)
)
)

assert self.tts_stream is not None
self.tts_stream.async_set_message_stream(tts_input_stream_generator())

with (
chat_session.async_get_chat_session(
@ -1199,6 +1268,8 @@ class PipelineRun:
speech = conversation_result.response.speech.get("plain", {}).get(
"speech", ""
)
if tts_input_stream and self._streamed_response_text:
tts_input_stream.put_nowait(None)

except Exception as src_error:
_LOGGER.exception("Unexpected error during intent recognition")
@ -1276,26 +1347,11 @@ class PipelineRun:
)
)

try:
# Synthesize audio and get URL
tts_media_id = tts_generate_media_source_id(
self.hass,
tts_input,
engine=self.tts_stream.engine,
language=self.tts_stream.language,
options=self.tts_stream.options,
)
except Exception as src_error:
_LOGGER.exception("Unexpected error during text-to-speech")
raise TextToSpeechError(
code="tts-failed",
message="Unexpected error during text-to-speech",
) from src_error

self.tts_stream.async_set_message(tts_input)
if not self._streamed_response_text:
self.tts_stream.async_set_message(tts_input)

tts_output = {
"media_id": tts_media_id,
"media_id": self.tts_stream.media_source_id,
"token": self.tts_stream.token,
"url": self.tts_stream.url,
"mime_type": self.tts_stream.content_type,
@ -11,7 +11,7 @@ from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
from homeassistant.helpers.dispatcher import async_dispatcher_send

from ..const import ATTR_MANUFACTURER, DOMAIN as AXIS_DOMAIN
from ..const import ATTR_MANUFACTURER, DOMAIN
from .config import AxisConfig
from .entity_loader import AxisEntityLoader
from .event_source import AxisEventSource
@ -79,7 +79,7 @@ class AxisHub:
config_entry_id=self.config.entry.entry_id,
configuration_url=self.api.config.url,
connections={(CONNECTION_NETWORK_MAC, self.unique_id)},
identifiers={(AXIS_DOMAIN, self.unique_id)},
identifiers={(DOMAIN, self.unique_id)},
manufacturer=ATTR_MANUFACTURER,
model=f"{self.config.model} {self.product_type}",
name=self.config.name,
@ -2,8 +2,8 @@

from aiohttp import ClientTimeout
from azure.core.exceptions import (
AzureError,
ClientAuthenticationError,
HttpResponseError,
ResourceNotFoundError,
)
from azure.core.pipeline.transport._aiohttp import (
@ -70,7 +70,7 @@ async def async_setup_entry(
translation_key="invalid_auth",
translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
) from err
except HttpResponseError as err:
except AzureError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
@ -8,7 +8,7 @@ import json
import logging
from typing import Any, Concatenate

from azure.core.exceptions import HttpResponseError
from azure.core.exceptions import AzureError, HttpResponseError, ServiceRequestError
from azure.storage.blob import BlobProperties

from homeassistant.components.backup import (
@ -80,6 +80,20 @@ def handle_backup_errors[_R, **P](
f"Error during backup operation in {func.__name__}:"
f" Status {err.status_code}, message: {err.message}"
) from err
except ServiceRequestError as err:
raise BackupAgentError(
f"Timeout during backup operation in {func.__name__}"
) from err
except AzureError as err:
_LOGGER.debug(
"Error during backup in %s: %s",
func.__name__,
err,
exc_info=True,
)
raise BackupAgentError(
f"Error during backup operation in {func.__name__}: {err}"
) from err

return wrapper

@ -23,6 +23,7 @@ from .const import DATA_MANAGER, DOMAIN
from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator
from .http import async_register_http_views
from .manager import (
AddonErrorData,
BackupManager,
BackupManagerError,
BackupPlatformEvent,
@ -48,6 +49,7 @@ from .util import suggested_filename, suggested_filename_from_name_date
from .websocket import async_register_websocket_handlers

__all__ = [
"AddonErrorData",
"AddonInfo",
"AgentBackup",
"BackupAgent",
@ -79,7 +81,7 @@ __all__ = [
"suggested_filename_from_name_date",
]

PLATFORMS = [Platform.SENSOR]
PLATFORMS = [Platform.EVENT, Platform.SENSOR]

CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)

@ -33,6 +33,7 @@ class BackupCoordinatorData:
last_attempted_automatic_backup: datetime | None
last_successful_automatic_backup: datetime | None
next_scheduled_automatic_backup: datetime | None
last_event: ManagerStateEvent | BackupPlatformEvent | None


class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
@ -60,11 +61,13 @@ class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
]

self.backup_manager = backup_manager
self._last_event: ManagerStateEvent | BackupPlatformEvent | None = None

@callback
def _on_event(self, event: ManagerStateEvent | BackupPlatformEvent) -> None:
"""Handle new event."""
LOGGER.debug("Received backup event: %s", event)
self._last_event = event
self.config_entry.async_create_task(self.hass, self.async_refresh())

async def _async_update_data(self) -> BackupCoordinatorData:
@ -74,6 +77,7 @@ class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
self.backup_manager.config.data.last_attempted_automatic_backup,
self.backup_manager.config.data.last_completed_automatic_backup,
self.backup_manager.config.data.schedule.next_automatic_backup,
self._last_event,
)

@callback
@ -11,7 +11,7 @@ from .const import DOMAIN
from .coordinator import BackupDataUpdateCoordinator


class BackupManagerEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
class BackupManagerBaseEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
"""Base entity for backup manager."""

_attr_has_entity_name = True
@ -19,12 +19,9 @@ class BackupManagerEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
def __init__(
self,
coordinator: BackupDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize base entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_unique_id = entity_description.key
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, "backup_manager")},
manufacturer="Home Assistant",
@ -34,3 +31,17 @@ class BackupManagerEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
entry_type=DeviceEntryType.SERVICE,
configuration_url="homeassistant://config/backup",
)


class BackupManagerEntity(BackupManagerBaseEntity):
"""Entity for backup manager."""

def __init__(
self,
coordinator: BackupDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_unique_id = entity_description.key
59
homeassistant/components/backup/event.py
Normal file
@ -0,0 +1,59 @@
"""Event platform for Home Assistant Backup integration."""

from __future__ import annotations

from typing import Final

from homeassistant.components.event import EventEntity
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator
from .entity import BackupManagerBaseEntity
from .manager import CreateBackupEvent, CreateBackupState

ATTR_BACKUP_STAGE: Final[str] = "backup_stage"
ATTR_FAILED_REASON: Final[str] = "failed_reason"


async def async_setup_entry(
hass: HomeAssistant,
config_entry: BackupConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Event set up for backup config entry."""
coordinator = config_entry.runtime_data
async_add_entities([AutomaticBackupEvent(coordinator)])


class AutomaticBackupEvent(BackupManagerBaseEntity, EventEntity):
"""Representation of an automatic backup event."""

_attr_event_types = [s.value for s in CreateBackupState]
_unrecorded_attributes = frozenset({ATTR_FAILED_REASON, ATTR_BACKUP_STAGE})
coordinator: BackupDataUpdateCoordinator

def __init__(self, coordinator: BackupDataUpdateCoordinator) -> None:
"""Initialize the automatic backup event."""
super().__init__(coordinator)
self._attr_unique_id = "automatic_backup_event"
self._attr_translation_key = "automatic_backup_event"

@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if (
not (data := self.coordinator.data)
or (event := data.last_event) is None
or not isinstance(event, CreateBackupEvent)
):
return

self._trigger_event(
event.state,
{
ATTR_BACKUP_STAGE: event.stage,
ATTR_FAILED_REASON: event.reason,
},
)
self.async_write_ha_state()
@ -1,4 +1,11 @@
{
"entity": {
"event": {
"automatic_backup_event": {
"default": "mdi:database"
}
}
},
"services": {
"create": {
"service": "mdi:cloud-upload"
@ -62,6 +62,7 @@ from .const import (
|
||||
LOGGER,
|
||||
)
|
||||
from .models import (
|
||||
AddonInfo,
|
||||
AgentBackup,
|
||||
BackupError,
|
||||
BackupManagerError,
|
||||
@ -102,15 +103,27 @@ class ManagerBackup(BaseBackup):
|
||||
"""Backup class."""
|
||||
|
||||
agents: dict[str, AgentBackupStatus]
|
||||
failed_addons: list[AddonInfo]
|
||||
failed_agent_ids: list[str]
|
||||
failed_folders: list[Folder]
|
||||
with_automatic_settings: bool | None
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True, slots=True)
|
||||
class AddonErrorData:
|
||||
"""Addon error class."""
|
||||
|
||||
addon: AddonInfo
|
||||
errors: list[tuple[str, str]]
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True, slots=True)
|
||||
class WrittenBackup:
|
||||
"""Written backup class."""
|
||||
|
||||
addon_errors: dict[str, AddonErrorData]
|
||||
backup: AgentBackup
|
||||
folder_errors: dict[Folder, list[tuple[str, str]]]
|
||||
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]]
|
||||
release_stream: Callable[[], Coroutine[Any, Any, None]]
|
||||
|
||||
@ -636,9 +649,13 @@ class BackupManager:
|
||||
for agent_backup in result:
|
||||
if (backup_id := agent_backup.backup_id) not in backups:
|
||||
if known_backup := self.known_backups.get(backup_id):
|
||||
failed_addons = known_backup.failed_addons
|
||||
failed_agent_ids = known_backup.failed_agent_ids
|
||||
failed_folders = known_backup.failed_folders
|
||||
else:
|
||||
failed_addons = []
|
||||
failed_agent_ids = []
|
||||
failed_folders = []
|
||||
with_automatic_settings = self.is_our_automatic_backup(
|
||||
agent_backup, await instance_id.async_get(self.hass)
|
||||
)
|
||||
@ -649,7 +666,9 @@ class BackupManager:
|
||||
date=agent_backup.date,
|
||||
database_included=agent_backup.database_included,
|
||||
extra_metadata=agent_backup.extra_metadata,
|
||||
failed_addons=failed_addons,
|
||||
failed_agent_ids=failed_agent_ids,
|
||||
failed_folders=failed_folders,
|
||||
folders=agent_backup.folders,
|
||||
homeassistant_included=agent_backup.homeassistant_included,
|
||||
homeassistant_version=agent_backup.homeassistant_version,
|
||||
@ -704,9 +723,13 @@ class BackupManager:
|
||||
continue
|
||||
if backup is None:
|
||||
if known_backup := self.known_backups.get(backup_id):
|
||||
failed_addons = known_backup.failed_addons
|
||||
failed_agent_ids = known_backup.failed_agent_ids
|
||||
failed_folders = known_backup.failed_folders
|
||||
else:
|
||||
failed_addons = []
|
||||
failed_agent_ids = []
|
||||
failed_folders = []
|
||||
with_automatic_settings = self.is_our_automatic_backup(
|
||||
result, await instance_id.async_get(self.hass)
|
||||
)
|
||||
@ -717,7 +740,9 @@ class BackupManager:
|
||||
date=result.date,
|
||||
database_included=result.database_included,
|
||||
extra_metadata=result.extra_metadata,
|
||||
failed_addons=failed_addons,
|
||||
failed_agent_ids=failed_agent_ids,
|
||||
failed_folders=failed_folders,
|
||||
folders=result.folders,
|
||||
homeassistant_included=result.homeassistant_included,
|
||||
homeassistant_version=result.homeassistant_version,
|
||||
@ -960,7 +985,7 @@ class BackupManager:
|
||||
password=None,
|
||||
)
|
||||
await written_backup.release_stream()
|
||||
self.known_backups.add(written_backup.backup, agent_errors, [])
|
||||
self.known_backups.add(written_backup.backup, agent_errors, {}, {}, [])
|
||||
return written_backup.backup.backup_id
|
||||
|
||||
async def async_create_backup(
|
||||
@ -1198,7 +1223,11 @@ class BackupManager:
|
||||
finally:
|
||||
await written_backup.release_stream()
|
||||
self.known_backups.add(
|
||||
written_backup.backup, agent_errors, unavailable_agents
|
||||
written_backup.backup,
|
||||
agent_errors,
|
||||
written_backup.addon_errors,
|
||||
written_backup.folder_errors,
|
||||
unavailable_agents,
|
||||
)
|
||||
if not agent_errors:
|
||||
if with_automatic_settings:
|
||||
@ -1208,7 +1237,9 @@ class BackupManager:
|
||||
backup_success = True
|
||||
|
||||
if with_automatic_settings:
|
||||
self._update_issue_after_agent_upload(agent_errors, unavailable_agents)
|
||||
self._update_issue_after_agent_upload(
|
||||
written_backup, agent_errors, unavailable_agents
|
||||
)
|
||||
# delete old backups more numerous than copies
|
||||
# try this regardless of agent errors above
|
||||
await delete_backups_exceeding_configured_count(self)
|
||||
@ -1354,8 +1385,10 @@ class BackupManager:
|
||||
for subscription in self._backup_event_subscriptions:
|
||||
subscription(event)
|
||||
|
||||
def _update_issue_backup_failed(self) -> None:
|
||||
"""Update issue registry when a backup fails."""
|
||||
def _create_automatic_backup_failed_issue(
|
||||
self, translation_key: str, translation_placeholders: dict[str, str] | None
|
||||
) -> None:
|
||||
"""Create an issue in the issue registry for automatic backup failures."""
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
@ -1364,37 +1397,73 @@ class BackupManager:
|
||||
is_persistent=True,
|
||||
learn_more_url="homeassistant://config/backup",
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="automatic_backup_failed_create",
|
||||
translation_key=translation_key,
|
||||
translation_placeholders=translation_placeholders,
|
||||
)
|
||||
|
||||
def _update_issue_backup_failed(self) -> None:
|
||||
"""Update issue registry when a backup fails."""
|
||||
self._create_automatic_backup_failed_issue(
|
||||
"automatic_backup_failed_create", None
|
||||
)
|
||||
|
||||
def _update_issue_after_agent_upload(
|
||||
self, agent_errors: dict[str, Exception], unavailable_agents: list[str]
|
||||
self,
|
||||
written_backup: WrittenBackup,
|
||||
agent_errors: dict[str, Exception],
|
||||
unavailable_agents: list[str],
|
||||
) -> None:
|
||||
"""Update issue registry after a backup is uploaded to agents."""
|
||||
if not agent_errors and not unavailable_agents:
|
||||
|
||||
addon_errors = written_backup.addon_errors
|
||||
failed_agents = unavailable_agents + [
|
||||
self.backup_agents[agent_id].name for agent_id in agent_errors
|
||||
]
|
||||
folder_errors = written_backup.folder_errors
|
||||
|
||||
if not failed_agents and not addon_errors and not folder_errors:
|
||||
# No issues to report, clear previous error
|
||||
ir.async_delete_issue(self.hass, DOMAIN, "automatic_backup_failed")
|
||||
return
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
"automatic_backup_failed",
|
||||
is_fixable=False,
|
||||
is_persistent=True,
|
||||
learn_more_url="homeassistant://config/backup",
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="automatic_backup_failed_upload_agents",
|
||||
translation_placeholders={
|
||||
"failed_agents": ", ".join(
|
||||
chain(
|
||||
(
|
||||
self.backup_agents[agent_id].name
|
||||
for agent_id in agent_errors
|
||||
),
|
||||
unavailable_agents,
|
||||
if failed_agents and not (addon_errors or folder_errors):
|
||||
# No issues with add-ons or folders, but issues with agents
|
||||
self._create_automatic_backup_failed_issue(
|
||||
"automatic_backup_failed_upload_agents",
|
||||
{"failed_agents": ", ".join(failed_agents)},
|
||||
)
|
||||
elif addon_errors and not (failed_agents or folder_errors):
|
||||
# No issues with agents or folders, but issues with add-ons
|
||||
self._create_automatic_backup_failed_issue(
|
||||
"automatic_backup_failed_addons",
|
||||
{
|
||||
"failed_addons": ", ".join(
|
||||
val.addon.name or val.addon.slug
|
||||
for val in addon_errors.values()
|
||||
)
|
||||
)
|
||||
},
|
||||
)
|
||||
},
|
||||
)
|
||||
elif folder_errors and not (failed_agents or addon_errors):
|
||||
# No issues with agents or add-ons, but issues with folders
|
||||
self._create_automatic_backup_failed_issue(
|
||||
"automatic_backup_failed_folders",
|
||||
{"failed_folders": ", ".join(folder for folder in folder_errors)},
|
||||
)
|
||||
else:
|
||||
# Issues with agents, add-ons, and/or folders
|
||||
self._create_automatic_backup_failed_issue(
|
||||
"automatic_backup_failed_agents_addons_folders",
|
||||
{
|
||||
"failed_agents": ", ".join(failed_agents) or "-",
|
||||
"failed_addons": (
|
||||
", ".join(
|
||||
val.addon.name or val.addon.slug
|
||||
for val in addon_errors.values()
|
||||
)
|
||||
or "-"
|
||||
),
|
||||
"failed_folders": ", ".join(f for f in folder_errors) or "-",
|
||||
},
|
||||
)
|
||||
|
||||
async def async_can_decrypt_on_download(
|
||||
self,
|
||||
@ -1460,7 +1529,12 @@ class KnownBackups:
|
||||
self._backups = {
|
||||
backup["backup_id"]: KnownBackup(
|
||||
backup_id=backup["backup_id"],
|
||||
failed_addons=[
|
||||
AddonInfo(name=a["name"], slug=a["slug"], version=a["version"])
|
||||
for a in backup["failed_addons"]
|
||||
],
|
||||
failed_agent_ids=backup["failed_agent_ids"],
|
||||
failed_folders=[Folder(f) for f in backup["failed_folders"]],
|
||||
)
|
||||
for backup in stored_backups
|
||||
}
|
||||
@ -1473,12 +1547,16 @@ class KnownBackups:
|
||||
self,
|
||||
backup: AgentBackup,
|
||||
agent_errors: dict[str, Exception],
|
||||
failed_addons: dict[str, AddonErrorData],
|
||||
failed_folders: dict[Folder, list[tuple[str, str]]],
|
||||
unavailable_agents: list[str],
|
||||
) -> None:
|
||||
"""Add a backup."""
|
||||
self._backups[backup.backup_id] = KnownBackup(
|
||||
backup_id=backup.backup_id,
|
||||
failed_addons=[val.addon for val in failed_addons.values()],
|
||||
failed_agent_ids=list(chain(agent_errors, unavailable_agents)),
|
||||
failed_folders=list(failed_folders),
|
||||
)
|
||||
self._manager.store.save()
|
||||
|
||||
@ -1499,21 +1577,38 @@ class KnownBackup:
|
||||
"""Persistent backup data."""
|
||||
|
||||
backup_id: str
|
||||
failed_addons: list[AddonInfo]
|
||||
failed_agent_ids: list[str]
|
||||
failed_folders: list[Folder]
|
||||
|
||||
def to_dict(self) -> StoredKnownBackup:
|
||||
"""Convert known backup to a dict."""
|
||||
return {
|
||||
"backup_id": self.backup_id,
|
||||
"failed_addons": [
|
||||
{"name": a.name, "slug": a.slug, "version": a.version}
|
||||
for a in self.failed_addons
|
||||
],
|
||||
"failed_agent_ids": self.failed_agent_ids,
|
||||
"failed_folders": [f.value for f in self.failed_folders],
|
||||
}
|
||||
|
||||
|
||||
class StoredAddonInfo(TypedDict):
|
||||
"""Stored add-on info."""
|
||||
|
||||
name: str | None
|
||||
slug: str
|
||||
version: str | None
|
||||
|
||||
|
||||
class StoredKnownBackup(TypedDict):
|
||||
"""Stored persistent backup data."""
|
||||
|
||||
backup_id: str
|
||||
failed_addons: list[StoredAddonInfo]
|
||||
failed_agent_ids: list[str]
|
||||
failed_folders: list[str]
|
||||
|
||||
|
||||
class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
@ -1677,7 +1772,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
raise BackupReaderWriterError(str(err)) from err
|
||||
|
||||
return WrittenBackup(
|
||||
backup=backup, open_stream=open_backup, release_stream=remove_backup
|
||||
addon_errors={},
|
||||
backup=backup,
|
||||
folder_errors={},
|
||||
open_stream=open_backup,
|
||||
release_stream=remove_backup,
|
||||
)
|
||||
finally:
|
||||
# Inform integrations the backup is done
|
||||
@ -1816,7 +1915,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
await async_add_executor_job(temp_file.unlink, True)
|
||||
|
||||
return WrittenBackup(
|
||||
backup=backup, open_stream=open_backup, release_stream=remove_backup
|
||||
addon_errors={},
|
||||
backup=backup,
|
||||
folder_errors={},
|
||||
open_stream=open_backup,
|
||||
release_stream=remove_backup,
|
||||
)
|
||||
|
||||
async def async_restore_backup(
|
||||
|
@ -13,9 +13,9 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
class AddonInfo:
|
||||
"""Addon information."""
|
||||
|
||||
name: str
|
||||
name: str | None
|
||||
slug: str
|
||||
version: str
|
||||
version: str | None
|
||||
|
||||
|
||||
class Folder(StrEnum):
|
||||
|
@ -16,7 +16,7 @@ if TYPE_CHECKING:
|
||||
STORE_DELAY_SAVE = 30
|
||||
STORAGE_KEY = DOMAIN
|
||||
STORAGE_VERSION = 1
|
||||
STORAGE_VERSION_MINOR = 6
|
||||
STORAGE_VERSION_MINOR = 7
|
||||
|
||||
|
||||
class StoredBackupData(TypedDict):
|
||||
@ -76,8 +76,16 @@ class _BackupStore(Store[StoredBackupData]):
|
||||
# Version 1.6 adds agent retention settings
|
||||
for agent in data["config"]["agents"]:
|
||||
data["config"]["agents"][agent]["retention"] = None
|
||||
if old_minor_version < 7:
|
||||
# Version 1.7 adds failing addons and folders
|
||||
for backup in data["backups"]:
|
||||
backup["failed_addons"] = []
|
||||
backup["failed_folders"] = []
|
||||
|
||||
# Note: We allow reading data with major version 2.
|
||||
# Note: We allow reading data with major version 2 in which the unused key
|
||||
# data["config"]["schedule"]["state"] will be removed. The bump to 2 is
|
||||
# planned to happen after a 6 month quiet period with no minor version
|
||||
# changes.
|
||||
# Reject if major version is higher than 2.
|
||||
if old_major_version > 2:
|
||||
raise NotImplementedError
|
||||
|
@ -11,6 +11,18 @@
|
||||
"automatic_backup_failed_upload_agents": {
|
||||
"title": "Automatic backup could not be uploaded to the configured locations",
|
||||
"description": "The automatic backup could not be uploaded to the configured locations {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
|
||||
},
|
||||
"automatic_backup_failed_addons": {
|
||||
"title": "Not all add-ons could be included in automatic backup",
|
||||
"description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
|
||||
},
|
||||
"automatic_backup_failed_agents_addons_folders": {
|
||||
"title": "Automatic backup was created with errors",
|
||||
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the core and supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
|
||||
},
|
||||
"automatic_backup_failed_folders": {
|
||||
"title": "Not all folders could be included in automatic backup",
|
||||
"description": "Folders {failed_folders} could not be included in automatic backup. Please check the supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@ -24,6 +36,22 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"event": {
|
||||
"automatic_backup_event": {
|
||||
"name": "Automatic backup",
|
||||
"state_attributes": {
|
||||
"event_type": {
|
||||
"state": {
|
||||
"completed": "Completed successfully",
|
||||
"failed": "Failed",
|
||||
"in_progress": "In progress"
|
||||
}
|
||||
},
|
||||
"backup_stage": { "name": "Backup stage" },
|
||||
"failed_reason": { "name": "Failure reason" }
|
||||
}
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"backup_manager_state": {
|
||||
"name": "Backup Manager state",
|
||||
|
@ -21,7 +21,6 @@ from .entity import BleBoxEntity
|
||||
SCAN_INTERVAL = timedelta(seconds=5)
|
||||
|
||||
BLEBOX_TO_HVACMODE = {
|
||||
None: None,
|
||||
0: HVACMode.OFF,
|
||||
1: HVACMode.HEAT,
|
||||
2: HVACMode.COOL,
|
||||
@ -59,12 +58,14 @@ class BleBoxClimateEntity(BleBoxEntity[blebox_uniapi.climate.Climate], ClimateEn
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
|
||||
@property
|
||||
def hvac_modes(self):
|
||||
def hvac_modes(self) -> list[HVACMode]:
|
||||
"""Return list of supported HVAC modes."""
|
||||
if self._feature.mode is None:
|
||||
return [HVACMode.OFF]
|
||||
return [HVACMode.OFF, BLEBOX_TO_HVACMODE[self._feature.mode]]
|
||||
|
||||
@property
|
||||
def hvac_mode(self):
|
||||
def hvac_mode(self) -> HVACMode | None:
|
||||
"""Return the desired HVAC mode."""
|
||||
if self._feature.is_on is None:
|
||||
return None
|
||||
@ -75,7 +76,7 @@ class BleBoxClimateEntity(BleBoxEntity[blebox_uniapi.climate.Climate], ClimateEn
|
||||
return HVACMode.HEAT if self._feature.is_on else HVACMode.OFF
|
||||
|
||||
@property
|
||||
def hvac_action(self):
|
||||
def hvac_action(self) -> HVACAction | None:
|
||||
"""Return the actual current HVAC action."""
|
||||
if self._feature.hvac_action is not None:
|
||||
if not self._feature.is_on:
|
||||
@ -88,22 +89,22 @@ class BleBoxClimateEntity(BleBoxEntity[blebox_uniapi.climate.Climate], ClimateEn
|
||||
return HVACAction.HEATING if self._feature.is_heating else HVACAction.IDLE
|
||||
|
||||
@property
|
||||
def max_temp(self):
|
||||
def max_temp(self) -> float:
|
||||
"""Return the maximum temperature supported."""
|
||||
return self._feature.max_temp
|
||||
|
||||
@property
|
||||
def min_temp(self):
|
||||
def min_temp(self) -> float:
|
||||
"""Return the maximum temperature supported."""
|
||||
return self._feature.min_temp
|
||||
|
||||
@property
|
||||
def current_temperature(self):
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
return self._feature.current
|
||||
|
||||
@property
|
||||
def target_temperature(self):
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the desired thermostat temperature."""
|
||||
return self._feature.desired
|
||||
|
||||
|
@ -84,7 +84,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
|
||||
return color_util.color_temperature_mired_to_kelvin(self._feature.color_temp)
|
||||
|
||||
@property
|
||||
def color_mode(self):
|
||||
def color_mode(self) -> ColorMode:
|
||||
"""Return the color mode.
|
||||
|
||||
Set values to _attr_ibutes if needed.
|
||||
@ -92,7 +92,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
|
||||
return COLOR_MODE_MAP.get(self._feature.color_mode, ColorMode.ONOFF)
|
||||
|
||||
@property
|
||||
def supported_color_modes(self):
|
||||
def supported_color_modes(self) -> set[ColorMode]:
|
||||
"""Return supported color modes."""
|
||||
return {self.color_mode}
|
||||
|
||||
@ -107,7 +107,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
|
||||
return self._feature.effect
|
||||
|
||||
@property
|
||||
def rgb_color(self):
|
||||
def rgb_color(self) -> tuple[int, int, int] | None:
|
||||
"""Return value for rgb."""
|
||||
if (rgb_hex := self._feature.rgb_hex) is None:
|
||||
return None
|
||||
@ -118,14 +118,14 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
|
||||
)
|
||||
|
||||
@property
|
||||
def rgbw_color(self):
|
||||
def rgbw_color(self) -> tuple[int, int, int, int] | None:
|
||||
"""Return the hue and saturation."""
|
||||
if (rgbw_hex := self._feature.rgbw_hex) is None:
|
||||
return None
|
||||
return tuple(blebox_uniapi.light.Light.rgb_hex_to_rgb_list(rgbw_hex)[0:4])
|
||||
|
||||
@property
|
||||
def rgbww_color(self):
|
||||
def rgbww_color(self) -> tuple[int, int, int, int, int] | None:
|
||||
"""Return value for rgbww."""
|
||||
if (rgbww_hex := self._feature.rgbww_hex) is None:
|
||||
return None
|
||||
|
@ -24,7 +24,7 @@ from .const import DOMAIN, EVSE_ID, LOGGER, MODEL_TYPE
|
||||
|
||||
type BlueCurrentConfigEntry = ConfigEntry[Connector]
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
PLATFORMS = [Platform.BUTTON, Platform.SENSOR]
|
||||
CHARGE_POINTS = "CHARGE_POINTS"
|
||||
DATA = "data"
|
||||
DELAY = 5
|
||||
|
89
homeassistant/components/blue_current/button.py
Normal file
@ -0,0 +1,89 @@
|
||||
"""Support for Blue Current buttons."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from bluecurrent_api.client import Client
|
||||
|
||||
from homeassistant.components.button import (
|
||||
ButtonDeviceClass,
|
||||
ButtonEntity,
|
||||
ButtonEntityDescription,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import BlueCurrentConfigEntry, Connector
|
||||
from .entity import ChargepointEntity
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class ChargePointButtonEntityDescription(ButtonEntityDescription):
|
||||
"""Describes a Blue Current button entity."""
|
||||
|
||||
function: Callable[[Client, str], Coroutine[Any, Any, None]]
|
||||
|
||||
|
||||
CHARGE_POINT_BUTTONS = (
|
||||
ChargePointButtonEntityDescription(
|
||||
key="reset",
|
||||
translation_key="reset",
|
||||
function=lambda client, evse_id: client.reset(evse_id),
|
||||
device_class=ButtonDeviceClass.RESTART,
|
||||
),
|
||||
ChargePointButtonEntityDescription(
|
||||
key="reboot",
|
||||
translation_key="reboot",
|
||||
function=lambda client, evse_id: client.reboot(evse_id),
|
||||
device_class=ButtonDeviceClass.RESTART,
|
||||
),
|
||||
ChargePointButtonEntityDescription(
|
||||
key="stop_charge_session",
|
||||
translation_key="stop_charge_session",
|
||||
function=lambda client, evse_id: client.stop_session(evse_id),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: BlueCurrentConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Blue Current buttons."""
|
||||
connector: Connector = entry.runtime_data
|
||||
async_add_entities(
|
||||
ChargePointButton(
|
||||
connector,
|
||||
button,
|
||||
evse_id,
|
||||
)
|
||||
for evse_id in connector.charge_points
|
||||
for button in CHARGE_POINT_BUTTONS
|
||||
)
|
||||
|
||||
|
||||
class ChargePointButton(ChargepointEntity, ButtonEntity):
|
||||
"""Define a charge point button."""
|
||||
|
||||
has_value = True
|
||||
entity_description: ChargePointButtonEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
connector: Connector,
|
||||
description: ChargePointButtonEntityDescription,
|
||||
evse_id: str,
|
||||
) -> None:
|
||||
"""Initialize the button."""
|
||||
super().__init__(connector, evse_id)
|
||||
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{description.key}_{evse_id}"
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Handle the button press."""
|
||||
await self.entity_description.function(self.connector.client, self.evse_id)
|
@ -1,7 +1,5 @@
|
||||
"""Entity representing a Blue Current charge point."""
|
||||
|
||||
from abc import abstractmethod
|
||||
|
||||
from homeassistant.const import ATTR_NAME
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
@ -17,12 +15,12 @@ class BlueCurrentEntity(Entity):
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_should_poll = False
|
||||
has_value = False
|
||||
|
||||
def __init__(self, connector: Connector, signal: str) -> None:
|
||||
"""Initialize the entity."""
|
||||
self.connector = connector
|
||||
self.signal = signal
|
||||
self.has_value = False
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
@ -43,7 +41,6 @@ class BlueCurrentEntity(Entity):
|
||||
return self.connector.connected and self.has_value
|
||||
|
||||
@callback
|
||||
@abstractmethod
|
||||
def update_from_latest_data(self) -> None:
|
||||
"""Update the entity from the latest data."""
|
||||
|
||||
|
@ -19,6 +19,17 @@
|
||||
"current_left": {
|
||||
"default": "mdi:gauge"
|
||||
}
|
||||
},
|
||||
"button": {
|
||||
"reset": {
|
||||
"default": "mdi:restart"
|
||||
},
|
||||
"reboot": {
|
||||
"default": "mdi:restart-alert"
|
||||
},
|
||||
"stop_charge_session": {
|
||||
"default": "mdi:stop"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
{
|
||||
"domain": "blue_current",
|
||||
"name": "Blue Current",
|
||||
"codeowners": ["@Floris272", "@gleeuwen"],
|
||||
"codeowners": ["@gleeuwen", "@NickKoepr", "@jtodorova23"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/blue_current",
|
||||
"iot_class": "cloud_push",
|
||||
|
@ -113,6 +113,17 @@
|
||||
"grid_max_current": {
|
||||
"name": "Max grid current"
|
||||
}
|
||||
},
|
||||
"button": {
|
||||
"stop_charge_session": {
|
||||
"name": "Stop charge session"
|
||||
},
|
||||
"reboot": {
|
||||
"name": "Reboot"
|
||||
},
|
||||
"reset": {
|
||||
"name": "Reset"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -18,7 +18,7 @@
|
||||
"bleak==0.22.3",
|
||||
"bleak-retry-connector==3.9.0",
|
||||
"bluetooth-adapters==0.21.4",
|
||||
"bluetooth-auto-recovery==1.5.1",
|
||||
"bluetooth-auto-recovery==1.5.2",
|
||||
"bluetooth-data-tools==1.28.1",
|
||||
"dbus-fast==2.43.0",
|
||||
"habluetooth==3.48.2"
|
||||
|
@ -6,21 +6,31 @@ from ssl import SSLError
|
||||
|
||||
from bosch_alarm_mode2 import Panel
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_PORT, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN
|
||||
from .services import setup_services
|
||||
from .types import BoschAlarmConfigEntry
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
Platform.ALARM_CONTROL_PANEL,
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
]
|
||||
|
||||
type BoschAlarmConfigEntry = ConfigEntry[Panel]
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up bosch alarm services."""
|
||||
setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool:
|
||||
@ -52,8 +62,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -
|
||||
|
||||
device_registry = dr.async_get(hass)
|
||||
|
||||
mac = entry.data.get(CONF_MAC)
|
||||
|
||||
device_registry.async_get_or_create(
|
||||
config_entry_id=entry.entry_id,
|
||||
connections={(CONNECTION_NETWORK_MAC, mac)} if mac else set(),
|
||||
identifiers={(DOMAIN, entry.unique_id or entry.entry_id)},
|
||||
name=f"Bosch {panel.model}",
|
||||
manufacturer="Bosch Security Systems",
|
||||
|
@ -12,8 +12,8 @@ from homeassistant.components.alarm_control_panel import (
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import BoschAlarmConfigEntry
|
||||
from .entity import BoschAlarmAreaEntity
|
||||
from .types import BoschAlarmConfigEntry
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@ -34,6 +34,9 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
class AreaAlarmControlPanel(BoschAlarmAreaEntity, AlarmControlPanelEntity):
|
||||
"""An alarm control panel entity for a bosch alarm panel."""
|
||||
|
||||
|
220
homeassistant/components/bosch_alarm/binary_sensor.py
Normal file
@ -0,0 +1,220 @@
|
||||
"""Support for Bosch Alarm Panel binary sensors."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from bosch_alarm_mode2 import Panel
|
||||
from bosch_alarm_mode2.const import ALARM_PANEL_FAULTS
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import BoschAlarmConfigEntry
|
||||
from .entity import BoschAlarmAreaEntity, BoschAlarmEntity, BoschAlarmPointEntity
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class BoschAlarmFaultEntityDescription(BinarySensorEntityDescription):
|
||||
"""Describes Bosch Alarm sensor entity."""
|
||||
|
||||
fault: int
|
||||
|
||||
|
||||
FAULT_TYPES = [
|
||||
BoschAlarmFaultEntityDescription(
|
||||
key="panel_fault_battery_low",
|
||||
entity_registry_enabled_default=True,
|
||||
device_class=BinarySensorDeviceClass.BATTERY,
|
||||
fault=ALARM_PANEL_FAULTS.BATTERY_LOW,
|
||||
),
|
||||
BoschAlarmFaultEntityDescription(
|
||||
key="panel_fault_battery_mising",
|
||||
translation_key="panel_fault_battery_mising",
|
||||
entity_registry_enabled_default=True,
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
fault=ALARM_PANEL_FAULTS.BATTERY_MISING,
|
||||
),
|
||||
BoschAlarmFaultEntityDescription(
|
||||
key="panel_fault_ac_fail",
|
||||
translation_key="panel_fault_ac_fail",
|
||||
entity_registry_enabled_default=True,
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
fault=ALARM_PANEL_FAULTS.AC_FAIL,
|
||||
),
|
||||
BoschAlarmFaultEntityDescription(
|
||||
key="panel_fault_phone_line_failure",
|
||||
translation_key="panel_fault_phone_line_failure",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=BinarySensorDeviceClass.CONNECTIVITY,
|
||||
fault=ALARM_PANEL_FAULTS.PHONE_LINE_FAILURE,
|
||||
),
|
||||
BoschAlarmFaultEntityDescription(
|
||||
key="panel_fault_parameter_crc_fail_in_pif",
|
||||
translation_key="panel_fault_parameter_crc_fail_in_pif",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
fault=ALARM_PANEL_FAULTS.PARAMETER_CRC_FAIL_IN_PIF,
|
||||
),
|
||||
BoschAlarmFaultEntityDescription(
|
||||
key="panel_fault_communication_fail_since_rps_hang_up",
|
||||
translation_key="panel_fault_communication_fail_since_rps_hang_up",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
fault=ALARM_PANEL_FAULTS.COMMUNICATION_FAIL_SINCE_RPS_HANG_UP,
|
||||
),
|
||||
BoschAlarmFaultEntityDescription(
|
||||
key="panel_fault_sdi_fail_since_rps_hang_up",
|
||||
translation_key="panel_fault_sdi_fail_since_rps_hang_up",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
fault=ALARM_PANEL_FAULTS.SDI_FAIL_SINCE_RPS_HANG_UP,
|
||||
),
|
||||
BoschAlarmFaultEntityDescription(
|
||||
key="panel_fault_user_code_tamper_since_rps_hang_up",
|
||||
translation_key="panel_fault_user_code_tamper_since_rps_hang_up",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
fault=ALARM_PANEL_FAULTS.USER_CODE_TAMPER_SINCE_RPS_HANG_UP,
|
||||
),
|
||||
BoschAlarmFaultEntityDescription(
|
||||
key="panel_fault_fail_to_call_rps_since_rps_hang_up",
|
||||
translation_key="panel_fault_fail_to_call_rps_since_rps_hang_up",
|
||||
entity_registry_enabled_default=False,
|
||||
fault=ALARM_PANEL_FAULTS.FAIL_TO_CALL_RPS_SINCE_RPS_HANG_UP,
|
||||
),
|
||||
BoschAlarmFaultEntityDescription(
|
||||
key="panel_fault_point_bus_fail_since_rps_hang_up",
|
||||
translation_key="panel_fault_point_bus_fail_since_rps_hang_up",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
fault=ALARM_PANEL_FAULTS.POINT_BUS_FAIL_SINCE_RPS_HANG_UP,
|
||||
),
|
||||
BoschAlarmFaultEntityDescription(
|
||||
key="panel_fault_log_overflow",
|
||||
translation_key="panel_fault_log_overflow",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
fault=ALARM_PANEL_FAULTS.LOG_OVERFLOW,
|
||||
),
|
||||
BoschAlarmFaultEntityDescription(
|
||||
key="panel_fault_log_threshold",
|
||||
translation_key="panel_fault_log_threshold",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
fault=ALARM_PANEL_FAULTS.LOG_THRESHOLD,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: BoschAlarmConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up binary sensors for alarm points and the connection status."""
|
||||
panel = config_entry.runtime_data
|
||||
|
||||
entities: list[BinarySensorEntity] = [
|
||||
PointSensor(panel, point_id, config_entry.unique_id or config_entry.entry_id)
|
||||
for point_id in panel.points
|
||||
]
|
||||
|
||||
entities.extend(
|
||||
PanelFaultsSensor(
|
||||
panel,
|
||||
config_entry.unique_id or config_entry.entry_id,
|
||||
fault_type,
|
||||
)
|
||||
for fault_type in FAULT_TYPES
|
||||
)
|
||||
|
||||
entities.extend(
|
||||
AreaReadyToArmSensor(
|
||||
panel, area_id, config_entry.unique_id or config_entry.entry_id, "away"
|
||||
)
|
||||
for area_id in panel.areas
|
||||
)
|
||||
|
||||
entities.extend(
|
||||
AreaReadyToArmSensor(
|
||||
panel, area_id, config_entry.unique_id or config_entry.entry_id, "home"
|
||||
)
|
||||
for area_id in panel.areas
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
class PanelFaultsSensor(BoschAlarmEntity, BinarySensorEntity):
|
||||
"""A binary sensor entity for each fault type in a bosch alarm panel."""
|
||||
|
||||
_attr_entity_category = EntityCategory.DIAGNOSTIC
|
||||
entity_description: BoschAlarmFaultEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
panel: Panel,
|
||||
unique_id: str,
|
||||
entity_description: BoschAlarmFaultEntityDescription,
|
||||
) -> None:
|
||||
"""Set up a binary sensor entity for each fault type in a bosch alarm panel."""
|
||||
super().__init__(panel, unique_id, True)
|
||||
self.entity_description = entity_description
|
||||
self._fault_type = entity_description.fault
|
||||
self._attr_unique_id = f"{unique_id}_fault_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return if this fault has occurred."""
|
||||
return self._fault_type in self.panel.panel_faults_ids
|
||||
|
||||
|
||||
class AreaReadyToArmSensor(BoschAlarmAreaEntity, BinarySensorEntity):
|
||||
"""A binary sensor entity showing if a panel is ready to arm."""
|
||||
|
||||
_attr_entity_category = EntityCategory.DIAGNOSTIC
|
||||
|
||||
def __init__(
|
||||
self, panel: Panel, area_id: int, unique_id: str, arm_type: str
|
||||
) -> None:
|
||||
"""Set up a binary sensor entity for the arming status in a bosch alarm panel."""
|
||||
super().__init__(panel, area_id, unique_id, False, False, True)
|
||||
self.panel = panel
|
||||
self._arm_type = arm_type
|
||||
self._attr_translation_key = f"area_ready_to_arm_{arm_type}"
|
||||
self._attr_unique_id = f"{self._area_unique_id}_ready_to_arm_{arm_type}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return if this panel is ready to arm."""
|
||||
if self._arm_type == "away":
|
||||
return self._area.all_ready
|
||||
if self._arm_type == "home":
|
||||
return self._area.all_ready or self._area.part_ready
|
||||
return False
|
||||
|
||||
|
||||
class PointSensor(BoschAlarmPointEntity, BinarySensorEntity):
|
||||
"""A binary sensor entity for a point in a bosch alarm panel."""
|
||||
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, panel: Panel, point_id: int, unique_id: str) -> None:
|
||||
"""Set up a binary sensor entity for a point in a bosch alarm panel."""
|
||||
super().__init__(panel, point_id, unique_id)
|
||||
self._attr_unique_id = self._point_unique_id
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return if this point sensor is on."""
|
||||
return self._point.is_open()
|
@ -6,25 +6,30 @@ import asyncio
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
import ssl
|
||||
from typing import Any
|
||||
from typing import Any, Self
|
||||
|
||||
from bosch_alarm_mode2 import Panel
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_DHCP,
|
||||
SOURCE_RECONFIGURE,
|
||||
SOURCE_USER,
|
||||
ConfigEntryState,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_CODE,
|
||||
CONF_HOST,
|
||||
CONF_MAC,
|
||||
CONF_MODEL,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
|
||||
|
||||
from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN
|
||||
|
||||
@ -88,6 +93,12 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Init config flow."""
|
||||
|
||||
self._data: dict[str, Any] = {}
|
||||
self.mac: str | None = None
|
||||
self.host: str | None = None
|
||||
|
||||
def is_matching(self, other_flow: Self) -> bool:
|
||||
"""Return True if other_flow is matching this flow."""
|
||||
return self.mac == other_flow.mac or self.host == other_flow.host
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@ -96,9 +107,12 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
self.host = user_input[CONF_HOST]
|
||||
if self.source == SOURCE_USER:
|
||||
self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
|
||||
try:
|
||||
# Use load_selector = 0 to fetch the panel model without authentication.
|
||||
(model, serial) = await try_connect(user_input, 0)
|
||||
(model, _) = await try_connect(user_input, 0)
|
||||
except (
|
||||
OSError,
|
||||
ConnectionRefusedError,
|
||||
@ -129,6 +143,70 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_dhcp(
|
||||
self, discovery_info: DhcpServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle DHCP discovery."""
|
||||
self.mac = format_mac(discovery_info.macaddress)
|
||||
self.host = discovery_info.ip
|
||||
if self.hass.config_entries.flow.async_has_matching_flow(self):
|
||||
return self.async_abort(reason="already_in_progress")
|
||||
|
||||
for entry in self.hass.config_entries.async_entries(DOMAIN):
|
||||
if entry.data.get(CONF_MAC) == self.mac:
|
||||
result = self.hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
data={
|
||||
**entry.data,
|
||||
CONF_HOST: discovery_info.ip,
|
||||
},
|
||||
)
|
||||
if result:
|
||||
self.hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
return self.async_abort(reason="already_configured")
|
||||
if entry.data[CONF_HOST] == discovery_info.ip:
|
||||
if (
|
||||
not entry.data.get(CONF_MAC)
|
||||
and entry.state is ConfigEntryState.LOADED
|
||||
):
|
||||
result = self.hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
data={
|
||||
**entry.data,
|
||||
CONF_MAC: self.mac,
|
||||
},
|
||||
)
|
||||
if result:
|
||||
self.hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
return self.async_abort(reason="already_configured")
|
||||
try:
|
||||
# Use load_selector = 0 to fetch the panel model without authentication.
|
||||
(model, _) = await try_connect(
|
||||
{CONF_HOST: discovery_info.ip, CONF_PORT: 7700}, 0
|
||||
)
|
||||
except (
|
||||
OSError,
|
||||
ConnectionRefusedError,
|
||||
ssl.SSLError,
|
||||
asyncio.exceptions.TimeoutError,
|
||||
):
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
return self.async_abort(reason="unknown")
|
||||
self.context["title_placeholders"] = {
|
||||
"model": model,
|
||||
"host": discovery_info.ip,
|
||||
}
|
||||
self._data = {
|
||||
CONF_HOST: discovery_info.ip,
|
||||
CONF_MAC: self.mac,
|
||||
CONF_MODEL: model,
|
||||
CONF_PORT: 7700,
|
||||
}
|
||||
|
||||
return await self.async_step_auth()
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@ -172,7 +250,7 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
else:
|
||||
if serial_number:
|
||||
await self.async_set_unique_id(str(serial_number))
|
||||
if self.source == SOURCE_USER:
|
||||
if self.source in (SOURCE_USER, SOURCE_DHCP):
|
||||
if serial_number:
|
||||
self._abort_if_unique_id_configured()
|
||||
else:
|
||||
@ -184,6 +262,7 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
if serial_number:
|
||||
self._abort_if_unique_id_mismatch(reason="device_mismatch")
|
||||
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reconfigure_entry(),
|
||||
data=self._data,
|
||||
|
@ -1,6 +1,9 @@
|
||||
"""Constants for the Bosch Alarm integration."""
|
||||
|
||||
DOMAIN = "bosch_alarm"
|
||||
HISTORY_ATTR = "history"
|
||||
ATTR_HISTORY = "history"
|
||||
CONF_INSTALLER_CODE = "installer_code"
|
||||
CONF_USER_CODE = "user_code"
|
||||
ATTR_DATETIME = "datetime"
|
||||
SERVICE_SET_DATE_TIME = "set_date_time"
|
||||
ATTR_CONFIG_ENTRY_ID = "config_entry_id"
|
||||
|
@ -6,8 +6,8 @@ from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_PASSWORD
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import BoschAlarmConfigEntry
|
||||
from .const import CONF_INSTALLER_CODE, CONF_USER_CODE
|
||||
from .types import BoschAlarmConfigEntry
|
||||
|
||||
TO_REDACT = [CONF_INSTALLER_CODE, CONF_USER_CODE, CONF_PASSWORD]
|
||||
|
||||
|
@ -17,9 +17,13 @@ class BoschAlarmEntity(Entity):
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, panel: Panel, unique_id: str) -> None:
|
||||
def __init__(
|
||||
self, panel: Panel, unique_id: str, observe_faults: bool = False
|
||||
) -> None:
|
||||
"""Set up a entity for a bosch alarm panel."""
|
||||
self.panel = panel
|
||||
self._observe_faults = observe_faults
|
||||
self._attr_should_poll = False
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, unique_id)},
|
||||
name=f"Bosch {panel.model}",
|
||||
@ -34,10 +38,14 @@ class BoschAlarmEntity(Entity):
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Observe state changes."""
|
||||
self.panel.connection_status_observer.attach(self.schedule_update_ha_state)
|
||||
if self._observe_faults:
|
||||
self.panel.faults_observer.attach(self.schedule_update_ha_state)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Stop observing state changes."""
|
||||
self.panel.connection_status_observer.detach(self.schedule_update_ha_state)
|
||||
if self._observe_faults:
|
||||
self.panel.faults_observer.attach(self.schedule_update_ha_state)
|
||||
|
||||
|
||||
class BoschAlarmAreaEntity(BoschAlarmEntity):
|
||||
@ -88,6 +96,33 @@ class BoschAlarmAreaEntity(BoschAlarmEntity):
|
||||
self._area.status_observer.detach(self.schedule_update_ha_state)
|
||||
|
||||
|
||||
class BoschAlarmPointEntity(BoschAlarmEntity):
|
||||
"""A base entity for point related entities within a bosch alarm panel."""
|
||||
|
||||
def __init__(self, panel: Panel, point_id: int, unique_id: str) -> None:
|
||||
"""Set up a area related entity for a bosch alarm panel."""
|
||||
super().__init__(panel, unique_id)
|
||||
self._point_id = point_id
|
||||
self._point_unique_id = f"{unique_id}_point_{point_id}"
|
||||
self._point = panel.points[point_id]
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, self._point_unique_id)},
|
||||
name=self._point.name,
|
||||
manufacturer="Bosch Security Systems",
|
||||
via_device=(DOMAIN, unique_id),
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Observe state changes."""
|
||||
await super().async_added_to_hass()
|
||||
self._point.status_observer.attach(self.schedule_update_ha_state)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Stop observing state changes."""
|
||||
await super().async_added_to_hass()
|
||||
self._point.status_observer.detach(self.schedule_update_ha_state)
|
||||
|
||||
|
||||
class BoschAlarmDoorEntity(BoschAlarmEntity):
|
||||
"""A base entity for area related entities within a bosch alarm panel."""
|
||||
|
||||
|
@ -1,6 +1,20 @@
|
||||
{
|
||||
"services": {
|
||||
"set_date_time": {
|
||||
"service": "mdi:clock-edit"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"alarms_gas": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"alarms_fire": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"alarms_burglary": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"faulting_points": {
|
||||
"default": "mdi:alert-circle"
|
||||
}
|
||||
@ -24,6 +38,44 @@
|
||||
"on": "mdi:lock-open"
|
||||
}
|
||||
}
|
||||
},
|
||||
"binary_sensor": {
|
||||
"panel_fault_parameter_crc_fail_in_pif": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_phone_line_failure": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_sdi_fail_since_rps_hang_up": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_user_code_tamper_since_rps_hang_up": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_fail_to_call_rps_since_rps_hang_up": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_point_bus_fail_since_rps_hang_up": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_log_overflow": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"panel_fault_log_threshold": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
"area_ready_to_arm_away": {
|
||||
"default": "mdi:shield",
|
||||
"state": {
|
||||
"on": "mdi:shield-lock"
|
||||
}
|
||||
},
|
||||
"area_ready_to_arm_home": {
|
||||
"default": "mdi:shield",
|
||||
"state": {
|
||||
"on": "mdi:shield-home"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -3,6 +3,11 @@
|
||||
"name": "Bosch Alarm",
|
||||
"codeowners": ["@mag1024", "@sanjay900"],
|
||||
"config_flow": true,
|
||||
"dhcp": [
|
||||
{
|
||||
"macaddress": "000463*"
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/bosch_alarm",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
|
@ -13,10 +13,7 @@ rules:
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
@ -29,25 +26,22 @@ rules:
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: todo
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: todo
|
||||
parallel-updates: done
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: todo
|
||||
discovery: todo
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
|
@ -6,6 +6,7 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from bosch_alarm_mode2 import Panel
|
||||
from bosch_alarm_mode2.const import ALARM_MEMORY_PRIORITIES
|
||||
from bosch_alarm_mode2.panel import Area
|
||||
|
||||
from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
|
||||
@ -15,18 +16,53 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from . import BoschAlarmConfigEntry
|
||||
from .entity import BoschAlarmAreaEntity
|
||||
|
||||
ALARM_TYPES = {
|
||||
"burglary": {
|
||||
ALARM_MEMORY_PRIORITIES.BURGLARY_SUPERVISORY: "supervisory",
|
||||
ALARM_MEMORY_PRIORITIES.BURGLARY_TROUBLE: "trouble",
|
||||
ALARM_MEMORY_PRIORITIES.BURGLARY_ALARM: "alarm",
|
||||
},
|
||||
"gas": {
|
||||
ALARM_MEMORY_PRIORITIES.GAS_SUPERVISORY: "supervisory",
|
||||
ALARM_MEMORY_PRIORITIES.GAS_TROUBLE: "trouble",
|
||||
ALARM_MEMORY_PRIORITIES.GAS_ALARM: "alarm",
|
||||
},
|
||||
"fire": {
|
||||
ALARM_MEMORY_PRIORITIES.FIRE_SUPERVISORY: "supervisory",
|
||||
ALARM_MEMORY_PRIORITIES.FIRE_TROUBLE: "trouble",
|
||||
ALARM_MEMORY_PRIORITIES.FIRE_ALARM: "alarm",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class BoschAlarmSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes Bosch Alarm sensor entity."""
|
||||
|
||||
value_fn: Callable[[Area], int]
|
||||
value_fn: Callable[[Area], str | int]
|
||||
observe_alarms: bool = False
|
||||
observe_ready: bool = False
|
||||
observe_status: bool = False
|
||||
|
||||
|
||||
def priority_value_fn(priority_info: dict[int, str]) -> Callable[[Area], str]:
|
||||
"""Build a value_fn for a given priority type."""
|
||||
return lambda area: next(
|
||||
(key for priority, key in priority_info.items() if priority in area.alarms_ids),
|
||||
"no_issues",
|
||||
)
|
||||
|
||||
|
||||
SENSOR_TYPES: list[BoschAlarmSensorEntityDescription] = [
|
||||
*[
|
||||
BoschAlarmSensorEntityDescription(
|
||||
key=f"alarms_{key}",
|
||||
translation_key=f"alarms_{key}",
|
||||
value_fn=priority_value_fn(priority_type),
|
||||
observe_alarms=True,
|
||||
)
|
||||
for key, priority_type in ALARM_TYPES.items()
|
||||
],
|
||||
BoschAlarmSensorEntityDescription(
|
||||
key="faulting_points",
|
||||
translation_key="faulting_points",
|
||||
@ -81,6 +117,6 @@ class BoschAreaSensor(BoschAlarmAreaEntity, SensorEntity):
|
||||
self._attr_unique_id = f"{self._area_unique_id}_{entity_description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> int:
|
||||
def native_value(self) -> str | int:
|
||||
"""Return the state of the sensor."""
|
||||
return self.entity_description.value_fn(self._area)
|
||||
|
77
homeassistant/components/bosch_alarm/services.py
Normal file
77
homeassistant/components/bosch_alarm/services.py
Normal file
@ -0,0 +1,77 @@
|
||||
"""Services for the bosch_alarm integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import ATTR_CONFIG_ENTRY_ID, ATTR_DATETIME, DOMAIN, SERVICE_SET_DATE_TIME
|
||||
from .types import BoschAlarmConfigEntry
|
||||
|
||||
|
||||
def validate_datetime(value: Any) -> dt.datetime:
|
||||
"""Validate that a provided datetime is supported on a bosch alarm panel."""
|
||||
date_val = cv.datetime(value)
|
||||
if date_val.year < 2010:
|
||||
raise vol.RangeInvalid("datetime must be after 2009")
|
||||
|
||||
if date_val.year > 2037:
|
||||
raise vol.RangeInvalid("datetime must be before 2038")
|
||||
|
||||
return date_val
|
||||
|
||||
|
||||
SET_DATE_TIME_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY_ID): cv.string,
|
||||
vol.Optional(ATTR_DATETIME): validate_datetime,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_set_panel_date(call: ServiceCall) -> None:
|
||||
"""Set the date and time on a bosch alarm panel."""
|
||||
config_entry: BoschAlarmConfigEntry | None
|
||||
value: dt.datetime = call.data.get(ATTR_DATETIME, dt_util.now())
|
||||
entry_id = call.data[ATTR_CONFIG_ENTRY_ID]
|
||||
if not (config_entry := call.hass.config_entries.async_get_entry(entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": entry_id},
|
||||
)
|
||||
if config_entry.state is not ConfigEntryState.LOADED:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_loaded",
|
||||
translation_placeholders={"target": config_entry.title},
|
||||
)
|
||||
panel = config_entry.runtime_data
|
||||
try:
|
||||
await panel.set_panel_date(value)
|
||||
except asyncio.InvalidStateError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
translation_placeholders={"target": config_entry.title},
|
||||
) from err
|
||||
|
||||
|
||||
def setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the bosch alarm integration."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SET_DATE_TIME,
|
||||
async_set_panel_date,
|
||||
schema=SET_DATE_TIME_SCHEMA,
|
||||
)
|
12
homeassistant/components/bosch_alarm/services.yaml
Normal file
12
homeassistant/components/bosch_alarm/services.yaml
Normal file
@ -0,0 +1,12 @@
|
||||
set_date_time:
|
||||
fields:
|
||||
config_entry_id:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: bosch_alarm
|
||||
datetime:
|
||||
required: false
|
||||
example: "2025-05-10 00:00:00"
|
||||
selector:
|
||||
datetime:
|
@ -1,5 +1,6 @@
|
||||
{
|
||||
"config": {
|
||||
"flow_title": "{model} ({host})",
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
@ -42,6 +43,7 @@
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"abort": {
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
@ -49,6 +51,18 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"integration_not_found": {
|
||||
"message": "Integration \"{target}\" not found in registry."
|
||||
},
|
||||
"not_loaded": {
|
||||
"message": "{target} is not loaded."
|
||||
},
|
||||
"connection_error": {
|
||||
"message": "Could not connect to \"{target}\"."
|
||||
},
|
||||
"unknown_error": {
|
||||
"message": "An unknown error occurred while setting the date and time on \"{target}\"."
|
||||
},
|
||||
"cannot_connect": {
|
||||
"message": "Could not connect to panel."
|
||||
},
|
||||
@ -56,22 +70,111 @@
|
||||
"message": "Incorrect credentials for panel."
|
||||
},
|
||||
"incorrect_door_state": {
|
||||
"message": "Door cannot be manipulated while it is being cycled."
|
||||
"message": "Door cannot be manipulated while it is momentarily unlocked."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"set_date_time": {
|
||||
"name": "Set date & time",
|
||||
"description": "Sets the date and time on the alarm panel.",
|
||||
"fields": {
|
||||
"datetime": {
|
||||
"name": "Date & time",
|
||||
"description": "The date and time to set. The time zone of the Home Assistant instance is assumed. If omitted, the current date and time is used."
|
||||
},
|
||||
"config_entry_id": {
|
||||
"name": "Config entry",
|
||||
"description": "The Bosch Alarm integration ID."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"panel_fault_battery_mising": {
|
||||
"name": "Battery missing"
|
||||
},
|
||||
"panel_fault_ac_fail": {
|
||||
"name": "AC Failure"
|
||||
},
|
||||
"panel_fault_parameter_crc_fail_in_pif": {
|
||||
"name": "CRC failure in panel configuration"
|
||||
},
|
||||
"panel_fault_phone_line_failure": {
|
||||
"name": "Phone line failure"
|
||||
},
|
||||
"panel_fault_sdi_fail_since_rps_hang_up": {
|
||||
"name": "SDI failure since last RPS connection"
|
||||
},
|
||||
"panel_fault_user_code_tamper_since_rps_hang_up": {
|
||||
"name": "User code tamper since last RPS connection"
|
||||
},
|
||||
"panel_fault_fail_to_call_rps_since_rps_hang_up": {
|
||||
"name": "Failure to call RPS since last RPS connection"
|
||||
},
|
||||
"panel_fault_point_bus_fail_since_rps_hang_up": {
|
||||
"name": "Point bus failure since last RPS connection"
|
||||
},
|
||||
"panel_fault_log_overflow": {
|
||||
"name": "Log overflow"
|
||||
},
|
||||
"panel_fault_log_threshold": {
|
||||
"name": "Log threshold reached"
|
||||
},
|
||||
"area_ready_to_arm_away": {
|
||||
"name": "Area ready to arm away",
|
||||
"state": {
|
||||
"on": "Ready",
|
||||
"off": "Not ready"
|
||||
}
|
||||
},
|
||||
"area_ready_to_arm_home": {
|
||||
"name": "Area ready to arm home",
|
||||
"state": {
|
||||
"on": "Ready",
|
||||
"off": "Not ready"
|
||||
}
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"secured": {
|
||||
"name": "Secured"
|
||||
},
|
||||
"cycling": {
|
||||
"name": "Cycling"
|
||||
"name": "Momentarily unlocked"
|
||||
},
|
||||
"locked": {
|
||||
"name": "Locked"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"alarms_gas": {
|
||||
"name": "Gas alarm issues",
|
||||
"state": {
|
||||
"supervisory": "Supervisory",
|
||||
"trouble": "Trouble",
|
||||
"alarm": "Alarm",
|
||||
"no_issues": "No issues"
|
||||
}
|
||||
},
|
||||
"alarms_fire": {
|
||||
"name": "Fire alarm issues",
|
||||
"state": {
|
||||
"supervisory": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::supervisory%]",
|
||||
"trouble": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::trouble%]",
|
||||
"alarm": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::alarm%]",
|
||||
"no_issues": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::no_issues%]"
|
||||
}
|
||||
},
|
||||
"alarms_burglary": {
|
||||
"name": "Burglary alarm issues",
|
||||
"state": {
|
||||
"supervisory": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::supervisory%]",
|
||||
"trouble": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::trouble%]",
|
||||
"alarm": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::alarm%]",
|
||||
"no_issues": "[%key:component::bosch_alarm::entity::sensor::alarms_gas::state::no_issues%]"
|
||||
}
|
||||
},
|
||||
"faulting_points": {
|
||||
"name": "Faulting points",
|
||||
"unit_of_measurement": "points"
|
||||
|
7
homeassistant/components/bosch_alarm/types.py
Normal file
7
homeassistant/components/bosch_alarm/types.py
Normal file
@ -0,0 +1,7 @@
|
||||
"""Types for the Bosch Alarm integration."""
|
||||
|
||||
from bosch_alarm_mode2 import Panel
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
|
||||
type BoschAlarmConfigEntry = ConfigEntry[Panel]
|
@ -7,5 +7,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/camera",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["PyTurboJPEG==1.7.5"]
|
||||
"requirements": ["PyTurboJPEG==1.8.0"]
|
||||
}
|
||||
|
@ -18,23 +18,20 @@ from homeassistant.const import (
|
||||
SERVICE_TOGGLE,
|
||||
SERVICE_TURN_OFF,
|
||||
SERVICE_TURN_ON,
|
||||
STATE_OFF,
|
||||
STATE_ON,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.temperature import display_temp as show_temp
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import async_get_issue_tracker, async_suggest_report_issue
|
||||
from homeassistant.loader import async_suggest_report_issue
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.unit_conversion import TemperatureConverter
|
||||
|
||||
from .const import ( # noqa: F401
|
||||
ATTR_AUX_HEAT,
|
||||
ATTR_CURRENT_HUMIDITY,
|
||||
ATTR_CURRENT_TEMPERATURE,
|
||||
ATTR_FAN_MODE,
|
||||
@ -77,7 +74,6 @@ from .const import ( # noqa: F401
|
||||
PRESET_HOME,
|
||||
PRESET_NONE,
|
||||
PRESET_SLEEP,
|
||||
SERVICE_SET_AUX_HEAT,
|
||||
SERVICE_SET_FAN_MODE,
|
||||
SERVICE_SET_HUMIDITY,
|
||||
SERVICE_SET_HVAC_MODE,
|
||||
@ -168,12 +164,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"async_handle_set_preset_mode_service",
|
||||
[ClimateEntityFeature.PRESET_MODE],
|
||||
)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_SET_AUX_HEAT,
|
||||
{vol.Required(ATTR_AUX_HEAT): cv.boolean},
|
||||
async_service_aux_heat,
|
||||
[ClimateEntityFeature.AUX_HEAT],
|
||||
)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_SET_TEMPERATURE,
|
||||
SET_TEMPERATURE_SCHEMA,
|
||||
@ -239,7 +229,6 @@ CACHED_PROPERTIES_WITH_ATTR_ = {
|
||||
"target_temperature_low",
|
||||
"preset_mode",
|
||||
"preset_modes",
|
||||
"is_aux_heat",
|
||||
"fan_mode",
|
||||
"fan_modes",
|
||||
"swing_mode",
|
||||
@ -279,7 +268,6 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
_attr_hvac_action: HVACAction | None = None
|
||||
_attr_hvac_mode: HVACMode | None
|
||||
_attr_hvac_modes: list[HVACMode]
|
||||
_attr_is_aux_heat: bool | None
|
||||
_attr_max_humidity: float = DEFAULT_MAX_HUMIDITY
|
||||
_attr_max_temp: float
|
||||
_attr_min_humidity: float = DEFAULT_MIN_HUMIDITY
|
||||
@ -299,52 +287,6 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
_attr_target_temperature: float | None = None
|
||||
_attr_temperature_unit: str
|
||||
|
||||
__climate_reported_legacy_aux = False
|
||||
|
||||
def _report_legacy_aux(self) -> None:
|
||||
"""Log warning and create an issue if the entity implements legacy auxiliary heater."""
|
||||
|
||||
report_issue = async_suggest_report_issue(
|
||||
self.hass,
|
||||
integration_domain=self.platform.platform_name,
|
||||
module=type(self).__module__,
|
||||
)
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s::%s implements the `is_aux_heat` property or uses the auxiliary "
|
||||
"heater methods in a subclass of ClimateEntity which is "
|
||||
"deprecated and will be unsupported from Home Assistant 2025.4."
|
||||
" Please %s"
|
||||
),
|
||||
self.platform.platform_name,
|
||||
self.__class__.__name__,
|
||||
report_issue,
|
||||
)
|
||||
|
||||
translation_placeholders = {"platform": self.platform.platform_name}
|
||||
translation_key = "deprecated_climate_aux_no_url"
|
||||
issue_tracker = async_get_issue_tracker(
|
||||
self.hass,
|
||||
integration_domain=self.platform.platform_name,
|
||||
module=type(self).__module__,
|
||||
)
|
||||
if issue_tracker:
|
||||
translation_placeholders["issue_tracker"] = issue_tracker
|
||||
translation_key = "deprecated_climate_aux_url_custom"
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"deprecated_climate_aux_{self.platform.platform_name}",
|
||||
breaks_in_ha_version="2025.4.0",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
issue_domain=self.platform.platform_name,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key=translation_key,
|
||||
translation_placeholders=translation_placeholders,
|
||||
)
|
||||
self.__climate_reported_legacy_aux = True
|
||||
|
||||
@final
|
||||
@property
|
||||
def state(self) -> str | None:
|
||||
@ -453,14 +395,6 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
if ClimateEntityFeature.SWING_HORIZONTAL_MODE in supported_features:
|
||||
data[ATTR_SWING_HORIZONTAL_MODE] = self.swing_horizontal_mode
|
||||
|
||||
if ClimateEntityFeature.AUX_HEAT in supported_features:
|
||||
data[ATTR_AUX_HEAT] = STATE_ON if self.is_aux_heat else STATE_OFF
|
||||
if (
|
||||
self.__climate_reported_legacy_aux is False
|
||||
and "custom_components" in type(self).__module__
|
||||
):
|
||||
self._report_legacy_aux()
|
||||
|
||||
return data
|
||||
|
||||
@cached_property
|
||||
@ -540,14 +474,6 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""
|
||||
return self._attr_preset_modes
|
||||
|
||||
@cached_property
|
||||
def is_aux_heat(self) -> bool | None:
|
||||
"""Return true if aux heater.
|
||||
|
||||
Requires ClimateEntityFeature.AUX_HEAT.
|
||||
"""
|
||||
return self._attr_is_aux_heat
|
||||
|
||||
@cached_property
|
||||
def fan_mode(self) -> str | None:
|
||||
"""Return the fan setting.
|
||||
@ -732,22 +658,6 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""Set new preset mode."""
|
||||
await self.hass.async_add_executor_job(self.set_preset_mode, preset_mode)
|
||||
|
||||
def turn_aux_heat_on(self) -> None:
|
||||
"""Turn auxiliary heater on."""
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_turn_aux_heat_on(self) -> None:
|
||||
"""Turn auxiliary heater on."""
|
||||
await self.hass.async_add_executor_job(self.turn_aux_heat_on)
|
||||
|
||||
def turn_aux_heat_off(self) -> None:
|
||||
"""Turn auxiliary heater off."""
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_turn_aux_heat_off(self) -> None:
|
||||
"""Turn auxiliary heater off."""
|
||||
await self.hass.async_add_executor_job(self.turn_aux_heat_off)
|
||||
|
||||
def turn_on(self) -> None:
|
||||
"""Turn the entity on."""
|
||||
raise NotImplementedError
|
||||
@ -845,16 +755,6 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
return self._attr_max_humidity
|
||||
|
||||
|
||||
async def async_service_aux_heat(
|
||||
entity: ClimateEntity, service_call: ServiceCall
|
||||
) -> None:
|
||||
"""Handle aux heat service."""
|
||||
if service_call.data[ATTR_AUX_HEAT]:
|
||||
await entity.async_turn_aux_heat_on()
|
||||
else:
|
||||
await entity.async_turn_aux_heat_off()
|
||||
|
||||
|
||||
async def async_service_humidity_set(
|
||||
entity: ClimateEntity, service_call: ServiceCall
|
||||
) -> None:
|
||||
|
@ -96,7 +96,6 @@ class HVACAction(StrEnum):
|
||||
CURRENT_HVAC_ACTIONS = [cls.value for cls in HVACAction]
|
||||
|
||||
|
||||
ATTR_AUX_HEAT = "aux_heat"
|
||||
ATTR_CURRENT_HUMIDITY = "current_humidity"
|
||||
ATTR_CURRENT_TEMPERATURE = "current_temperature"
|
||||
ATTR_FAN_MODES = "fan_modes"
|
||||
@ -128,7 +127,6 @@ DOMAIN = "climate"
|
||||
|
||||
INTENT_SET_TEMPERATURE = "HassClimateSetTemperature"
|
||||
|
||||
SERVICE_SET_AUX_HEAT = "set_aux_heat"
|
||||
SERVICE_SET_FAN_MODE = "set_fan_mode"
|
||||
SERVICE_SET_PRESET_MODE = "set_preset_mode"
|
||||
SERVICE_SET_HUMIDITY = "set_humidity"
|
||||
@ -147,7 +145,6 @@ class ClimateEntityFeature(IntFlag):
|
||||
FAN_MODE = 8
|
||||
PRESET_MODE = 16
|
||||
SWING_MODE = 32
|
||||
AUX_HEAT = 64
|
||||
TURN_OFF = 128
|
||||
TURN_ON = 256
|
||||
SWING_HORIZONTAL_MODE = 512
|
||||
|
@ -1,17 +1,5 @@
|
||||
# Describes the format for available climate services
|
||||
|
||||
set_aux_heat:
|
||||
target:
|
||||
entity:
|
||||
domain: climate
|
||||
supported_features:
|
||||
- climate.ClimateEntityFeature.AUX_HEAT
|
||||
fields:
|
||||
aux_heat:
|
||||
required: true
|
||||
selector:
|
||||
boolean:
|
||||
|
||||
set_preset_mode:
|
||||
target:
|
||||
entity:
|
||||
|
@ -12,7 +12,6 @@ from homeassistant.helpers.significant_change import (
|
||||
)
|
||||
|
||||
from . import (
|
||||
ATTR_AUX_HEAT,
|
||||
ATTR_CURRENT_HUMIDITY,
|
||||
ATTR_CURRENT_TEMPERATURE,
|
||||
ATTR_FAN_MODE,
|
||||
@ -27,7 +26,6 @@ from . import (
|
||||
)
|
||||
|
||||
SIGNIFICANT_ATTRIBUTES: set[str] = {
|
||||
ATTR_AUX_HEAT,
|
||||
ATTR_CURRENT_HUMIDITY,
|
||||
ATTR_CURRENT_TEMPERATURE,
|
||||
ATTR_FAN_MODE,
|
||||
@ -67,7 +65,6 @@ def async_check_significant_change(
|
||||
|
||||
for attr_name in changed_attrs:
|
||||
if attr_name in [
|
||||
ATTR_AUX_HEAT,
|
||||
ATTR_FAN_MODE,
|
||||
ATTR_HVAC_ACTION,
|
||||
ATTR_PRESET_MODE,
|
||||
|
@ -36,9 +36,6 @@
|
||||
"fan_only": "Fan only"
|
||||
},
|
||||
"state_attributes": {
|
||||
"aux_heat": {
|
||||
"name": "Aux heat"
|
||||
},
|
||||
"current_humidity": {
|
||||
"name": "Current humidity"
|
||||
},
|
||||
@ -149,16 +146,6 @@
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"set_aux_heat": {
|
||||
"name": "Turn on/off auxiliary heater",
|
||||
"description": "Turns auxiliary heater on/off.",
|
||||
"fields": {
|
||||
"aux_heat": {
|
||||
"name": "Auxiliary heating",
|
||||
"description": "New value of auxiliary heater."
|
||||
}
|
||||
}
|
||||
},
|
||||
"set_preset_mode": {
|
||||
"name": "Set preset mode",
|
||||
"description": "Sets preset mode.",
|
||||
@ -267,16 +254,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_climate_aux_url_custom": {
|
||||
"title": "The {platform} custom integration is using deprecated climate auxiliary heater",
|
||||
"description": "The custom integration `{platform}` implements the `is_aux_heat` property or uses the auxiliary heater methods in a subclass of ClimateEntity.\n\nPlease create a bug report at {issue_tracker}.\n\nOnce an updated version of `{platform}` is available, install it and restart Home Assistant to fix this issue."
|
||||
},
|
||||
"deprecated_climate_aux_no_url": {
|
||||
"title": "[%key:component::climate::issues::deprecated_climate_aux_url_custom::title%]",
|
||||
"description": "The custom integration `{platform}` implements the `is_aux_heat` property or uses the auxiliary heater methods in a subclass of ClimateEntity.\n\nPlease report it to the author of the {platform} integration.\n\nOnce an updated version of `{platform}` is available, install it and restart Home Assistant to fix this issue."
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"not_valid_preset_mode": {
|
||||
"message": "Preset mode {mode} is not valid. Valid preset modes are: {modes}."
|
||||
|
@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["acme", "hass_nabucasa", "snitun"],
|
||||
"requirements": ["hass-nabucasa==0.100.0"],
|
||||
"requirements": ["hass-nabucasa==0.101.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
@ -9,6 +9,7 @@ from aiocomelit import ComelitSerialBridgeObject
|
||||
from aiocomelit.const import CLIMATE
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
DOMAIN as CLIMATE_DOMAIN,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACAction,
|
||||
@ -17,12 +18,12 @@ from homeassistant.components.climate import (
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import PRESET_MODE_AUTO, PRESET_MODE_AUTO_TARGET_TEMP, PRESET_MODE_MANUAL
|
||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
|
||||
from .entity import ComelitBridgeBaseEntity
|
||||
from .utils import bridge_api_call, cleanup_stale_entity, load_api_data
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@ -40,11 +41,13 @@ class ClimaComelitMode(StrEnum):
|
||||
class ClimaComelitCommand(StrEnum):
|
||||
"""Serial Bridge clima commands."""
|
||||
|
||||
AUTO = "auto"
|
||||
MANUAL = "man"
|
||||
OFF = "off"
|
||||
ON = "on"
|
||||
MANUAL = "man"
|
||||
SET = "set"
|
||||
AUTO = "auto"
|
||||
SNOW = "lower"
|
||||
SUN = "upper"
|
||||
|
||||
|
||||
class ClimaComelitApiStatus(TypedDict):
|
||||
@ -66,11 +69,15 @@ API_STATUS: dict[str, ClimaComelitApiStatus] = {
|
||||
),
|
||||
}
|
||||
|
||||
MODE_TO_ACTION: dict[HVACMode, ClimaComelitCommand] = {
|
||||
HVACMODE_TO_ACTION: dict[HVACMode, ClimaComelitCommand] = {
|
||||
HVACMode.OFF: ClimaComelitCommand.OFF,
|
||||
HVACMode.AUTO: ClimaComelitCommand.AUTO,
|
||||
HVACMode.COOL: ClimaComelitCommand.MANUAL,
|
||||
HVACMode.HEAT: ClimaComelitCommand.MANUAL,
|
||||
HVACMode.COOL: ClimaComelitCommand.SNOW,
|
||||
HVACMode.HEAT: ClimaComelitCommand.SUN,
|
||||
}
|
||||
|
||||
PRESET_MODE_TO_ACTION: dict[str, ClimaComelitCommand] = {
|
||||
PRESET_MODE_MANUAL: ClimaComelitCommand.MANUAL,
|
||||
PRESET_MODE_AUTO: ClimaComelitCommand.AUTO,
|
||||
}
|
||||
|
||||
|
||||
@ -83,26 +90,42 @@ async def async_setup_entry(
|
||||
|
||||
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
|
||||
|
||||
async_add_entities(
|
||||
ComelitClimateEntity(coordinator, device, config_entry.entry_id)
|
||||
for device in coordinator.data[CLIMATE].values()
|
||||
)
|
||||
entities: list[ClimateEntity] = []
|
||||
for device in coordinator.data[CLIMATE].values():
|
||||
values = load_api_data(device, CLIMATE_DOMAIN)
|
||||
if values[0] == 0 and values[4] == 0:
|
||||
# No climate data, device is only a humidifier/dehumidifier
|
||||
|
||||
await cleanup_stale_entity(
|
||||
hass, config_entry, f"{config_entry.entry_id}-{device.index}", device
|
||||
)
|
||||
|
||||
continue
|
||||
|
||||
entities.append(
|
||||
ComelitClimateEntity(coordinator, device, config_entry.entry_id)
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class ComelitClimateEntity(ComelitBridgeBaseEntity, ClimateEntity):
|
||||
"""Climate device."""
|
||||
|
||||
_attr_hvac_modes = [HVACMode.AUTO, HVACMode.COOL, HVACMode.HEAT, HVACMode.OFF]
|
||||
_attr_hvac_modes = [HVACMode.COOL, HVACMode.HEAT, HVACMode.OFF]
|
||||
_attr_preset_modes = [PRESET_MODE_AUTO, PRESET_MODE_MANUAL]
|
||||
_attr_max_temp = 30
|
||||
_attr_min_temp = 5
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.TURN_OFF
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
| ClimateEntityFeature.PRESET_MODE
|
||||
)
|
||||
_attr_target_temperature_step = PRECISION_TENTHS
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_name = None
|
||||
_attr_translation_key = "thermostat"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@ -117,35 +140,23 @@ class ComelitClimateEntity(ComelitBridgeBaseEntity, ClimateEntity):
|
||||
def _update_attributes(self) -> None:
|
||||
"""Update class attributes."""
|
||||
device = self.coordinator.data[CLIMATE][self._device.index]
|
||||
if not isinstance(device.val, list):
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="invalid_clima_data"
|
||||
)
|
||||
|
||||
# CLIMATE has a 2 item tuple:
|
||||
# - first for Clima
|
||||
# - second for Humidifier
|
||||
values = device.val[0]
|
||||
values = load_api_data(device, CLIMATE_DOMAIN)
|
||||
|
||||
_active = values[1]
|
||||
_mode = values[2] # Values from API: "O", "L", "U"
|
||||
_automatic = values[3] == ClimaComelitMode.AUTO
|
||||
|
||||
self._attr_preset_mode = PRESET_MODE_AUTO if _automatic else PRESET_MODE_MANUAL
|
||||
|
||||
self._attr_current_temperature = values[0] / 10
|
||||
|
||||
self._attr_hvac_action = None
|
||||
if _mode == ClimaComelitMode.OFF:
|
||||
self._attr_hvac_action = HVACAction.OFF
|
||||
if not _active:
|
||||
self._attr_hvac_action = HVACAction.IDLE
|
||||
if _mode in API_STATUS:
|
||||
elif _mode in API_STATUS:
|
||||
self._attr_hvac_action = API_STATUS[_mode]["hvac_action"]
|
||||
|
||||
self._attr_hvac_mode = None
|
||||
if _mode == ClimaComelitMode.OFF:
|
||||
self._attr_hvac_mode = HVACMode.OFF
|
||||
if _automatic:
|
||||
self._attr_hvac_mode = HVACMode.AUTO
|
||||
if _mode in API_STATUS:
|
||||
self._attr_hvac_mode = API_STATUS[_mode]["hvac_mode"]
|
||||
|
||||
@ -157,31 +168,48 @@ class ComelitClimateEntity(ComelitBridgeBaseEntity, ClimateEntity):
|
||||
self._update_attributes()
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
@bridge_api_call
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
if (
|
||||
target_temp := kwargs.get(ATTR_TEMPERATURE)
|
||||
) is None or self.hvac_mode == HVACMode.OFF:
|
||||
(target_temp := kwargs.get(ATTR_TEMPERATURE)) is None
|
||||
or self.hvac_mode == HVACMode.OFF
|
||||
or self._attr_preset_mode == PRESET_MODE_AUTO
|
||||
):
|
||||
return
|
||||
|
||||
await self.coordinator.api.set_clima_status(
|
||||
self._device.index, ClimaComelitCommand.MANUAL
|
||||
)
|
||||
await self.coordinator.api.set_clima_status(
|
||||
self._device.index, ClimaComelitCommand.SET, target_temp
|
||||
)
|
||||
self._attr_target_temperature = target_temp
|
||||
self.async_write_ha_state()
|
||||
|
||||
@bridge_api_call
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set hvac mode."""
|
||||
|
||||
if hvac_mode != HVACMode.OFF:
|
||||
if self._attr_hvac_mode == HVACMode.OFF:
|
||||
await self.coordinator.api.set_clima_status(
|
||||
self._device.index, ClimaComelitCommand.ON
|
||||
)
|
||||
await self.coordinator.api.set_clima_status(
|
||||
self._device.index, MODE_TO_ACTION[hvac_mode]
|
||||
self._device.index, HVACMODE_TO_ACTION[hvac_mode]
|
||||
)
|
||||
self._attr_hvac_mode = hvac_mode
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
"""Set new target preset mode."""
|
||||
|
||||
if self._attr_hvac_mode == HVACMode.OFF:
|
||||
return
|
||||
|
||||
await self.coordinator.api.set_clima_status(
|
||||
self._device.index, PRESET_MODE_TO_ACTION[preset_mode]
|
||||
)
|
||||
self._attr_preset_mode = preset_mode
|
||||
|
||||
if preset_mode == PRESET_MODE_AUTO:
|
||||
self._attr_target_temperature = PRESET_MODE_AUTO_TARGET_TEMP
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
@ -28,20 +28,22 @@ DEFAULT_HOST = "192.168.1.252"
|
||||
DEFAULT_PIN = 111111
|
||||
|
||||
|
||||
def user_form_schema(user_input: dict[str, Any] | None) -> vol.Schema:
|
||||
"""Return user form schema."""
|
||||
user_input = user_input or {}
|
||||
return vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
|
||||
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
|
||||
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
USER_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
|
||||
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
|
||||
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
|
||||
}
|
||||
)
|
||||
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.positive_int})
|
||||
STEP_RECONFIGURE = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Required(CONF_PORT): cv.port,
|
||||
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
|
||||
@ -87,13 +89,11 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=user_form_schema(user_input)
|
||||
)
|
||||
return self.async_show_form(step_id="user", data_schema=USER_SCHEMA)
|
||||
|
||||
self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
|
||||
|
||||
errors = {}
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
try:
|
||||
info = await validate_input(self.hass, user_input)
|
||||
@ -108,21 +108,21 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_create_entry(title=info["title"], data=user_input)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=user_form_schema(user_input), errors=errors
|
||||
step_id="user", data_schema=USER_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauth flow."""
|
||||
self.context["title_placeholders"] = {"host": entry_data[CONF_HOST]}
|
||||
self.context["title_placeholders"] = {CONF_HOST: entry_data[CONF_HOST]}
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauth confirm."""
|
||||
errors = {}
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
entry_data = reauth_entry.data
|
||||
@ -163,6 +163,42 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of the device."""
|
||||
reconfigure_entry = self._get_reconfigure_entry()
|
||||
if not user_input:
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure", data_schema=STEP_RECONFIGURE
|
||||
)
|
||||
|
||||
updated_host = user_input[CONF_HOST]
|
||||
|
||||
self._async_abort_entries_match({CONF_HOST: updated_host})
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
try:
|
||||
await validate_input(self.hass, user_input)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception: # noqa: BLE001
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
return self.async_update_reload_and_abort(
|
||||
reconfigure_entry, data_updates={CONF_HOST: updated_host}
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=STEP_RECONFIGURE,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
@ -11,3 +11,8 @@ DEFAULT_PORT = 80
|
||||
DEVICE_TYPE_LIST = [BRIDGE, VEDO]
|
||||
|
||||
SCAN_INTERVAL = 5
|
||||
|
||||
PRESET_MODE_AUTO = "automatic"
|
||||
PRESET_MODE_MANUAL = "manual"
|
||||
|
||||
PRESET_MODE_AUTO_TARGET_TEMP = 20
|
||||
|
@ -7,13 +7,14 @@ from typing import Any, cast
|
||||
from aiocomelit import ComelitSerialBridgeObject
|
||||
from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON
|
||||
|
||||
from homeassistant.components.cover import CoverDeviceClass, CoverEntity, CoverState
|
||||
from homeassistant.components.cover import CoverDeviceClass, CoverEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
|
||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
|
||||
from .entity import ComelitBridgeBaseEntity
|
||||
from .utils import bridge_api_call
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@ -68,16 +69,10 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
|
||||
def is_closed(self) -> bool | None:
|
||||
"""Return if the cover is closed."""
|
||||
|
||||
if self._last_state in [None, "unknown"]:
|
||||
return None
|
||||
|
||||
if self.device_status != STATE_COVER.index("stopped"):
|
||||
return False
|
||||
|
||||
if self._last_action:
|
||||
return self._last_action == STATE_COVER.index("closing")
|
||||
|
||||
return self._last_state == CoverState.CLOSED
|
||||
return None
|
||||
|
||||
@property
|
||||
def is_closing(self) -> bool:
|
||||
@ -89,6 +84,7 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
|
||||
"""Return if the cover is opening."""
|
||||
return self._current_action("opening")
|
||||
|
||||
@bridge_api_call
|
||||
async def _cover_set_state(self, action: int, state: int) -> None:
|
||||
"""Set desired cover state."""
|
||||
self._last_state = self.state
|
||||
|
@ -9,6 +9,7 @@ from aiocomelit import ComelitSerialBridgeObject
|
||||
from aiocomelit.const import CLIMATE
|
||||
|
||||
from homeassistant.components.humidifier import (
|
||||
DOMAIN as HUMIDIFIER_DOMAIN,
|
||||
MODE_AUTO,
|
||||
MODE_NORMAL,
|
||||
HumidifierAction,
|
||||
@ -17,12 +18,13 @@ from homeassistant.components.humidifier import (
|
||||
HumidifierEntityFeature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
|
||||
from .entity import ComelitBridgeBaseEntity
|
||||
from .utils import bridge_api_call, cleanup_stale_entity, load_api_data
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@ -66,6 +68,23 @@ async def async_setup_entry(
|
||||
|
||||
entities: list[ComelitHumidifierEntity] = []
|
||||
for device in coordinator.data[CLIMATE].values():
|
||||
values = load_api_data(device, HUMIDIFIER_DOMAIN)
|
||||
if values[0] == 0 and values[4] == 0:
|
||||
# No humidity data, device is only a climate
|
||||
|
||||
for device_class in (
|
||||
HumidifierDeviceClass.HUMIDIFIER,
|
||||
HumidifierDeviceClass.DEHUMIDIFIER,
|
||||
):
|
||||
await cleanup_stale_entity(
|
||||
hass,
|
||||
config_entry,
|
||||
f"{config_entry.entry_id}-{device.index}-{device_class}",
|
||||
device,
|
||||
)
|
||||
|
||||
continue
|
||||
|
||||
entities.append(
|
||||
ComelitHumidifierEntity(
|
||||
coordinator,
|
||||
@ -123,15 +142,7 @@ class ComelitHumidifierEntity(ComelitBridgeBaseEntity, HumidifierEntity):
|
||||
def _update_attributes(self) -> None:
|
||||
"""Update class attributes."""
|
||||
device = self.coordinator.data[CLIMATE][self._device.index]
|
||||
if not isinstance(device.val, list):
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="invalid_clima_data"
|
||||
)
|
||||
|
||||
# CLIMATE has a 2 item tuple:
|
||||
# - first for Clima
|
||||
# - second for Humidifier
|
||||
values = device.val[1]
|
||||
values = load_api_data(device, HUMIDIFIER_DOMAIN)
|
||||
|
||||
_active = values[1]
|
||||
_mode = values[2] # Values from API: "O", "L", "U"
|
||||
@ -154,6 +165,7 @@ class ComelitHumidifierEntity(ComelitBridgeBaseEntity, HumidifierEntity):
|
||||
self._update_attributes()
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
@bridge_api_call
|
||||
async def async_set_humidity(self, humidity: int) -> None:
|
||||
"""Set new target humidity."""
|
||||
if not self._attr_is_on:
|
||||
@ -171,6 +183,7 @@ class ComelitHumidifierEntity(ComelitBridgeBaseEntity, HumidifierEntity):
|
||||
self._attr_target_humidity = humidity
|
||||
self.async_write_ha_state()
|
||||
|
||||
@bridge_api_call
|
||||
async def async_set_mode(self, mode: str) -> None:
|
||||
"""Set humidifier mode."""
|
||||
await self.coordinator.api.set_humidity_status(
|
||||
@ -179,6 +192,7 @@ class ComelitHumidifierEntity(ComelitBridgeBaseEntity, HumidifierEntity):
|
||||
self._attr_mode = mode
|
||||
self.async_write_ha_state()
|
||||
|
||||
@bridge_api_call
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn on."""
|
||||
await self.coordinator.api.set_humidity_status(
|
||||
@ -187,6 +201,7 @@ class ComelitHumidifierEntity(ComelitBridgeBaseEntity, HumidifierEntity):
|
||||
self._attr_is_on = True
|
||||
self.async_write_ha_state()
|
||||
|
||||
@bridge_api_call
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn off."""
|
||||
await self.coordinator.api.set_humidity_status(
|
||||
|
@ -4,6 +4,18 @@
|
||||
"zone_status": {
|
||||
"default": "mdi:shield-check"
|
||||
}
|
||||
},
|
||||
"climate": {
|
||||
"thermostat": {
|
||||
"state_attributes": {
|
||||
"preset_mode": {
|
||||
"state": {
|
||||
"automatic": "mdi:refresh-auto",
|
||||
"manual": "mdi:alpha-m"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -12,6 +12,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import ComelitConfigEntry, ComelitSerialBridge
|
||||
from .entity import ComelitBridgeBaseEntity
|
||||
from .utils import bridge_api_call
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@ -39,6 +40,7 @@ class ComelitLightEntity(ComelitBridgeBaseEntity, LightEntity):
|
||||
_attr_name = None
|
||||
_attr_supported_color_modes = {ColorMode.ONOFF}
|
||||
|
||||
@bridge_api_call
|
||||
async def _light_set_state(self, state: int) -> None:
|
||||
"""Set desired light state."""
|
||||
await self.coordinator.api.set_device_status(LIGHT, self._device.index, state)
|
||||
|
@ -7,6 +7,6 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aiocomelit"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["aiocomelit==0.12.1"]
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aiocomelit==0.12.3"]
|
||||
}
|
||||
|
@ -26,9 +26,7 @@ rules:
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: todo
|
||||
comment: wrap api calls in try block
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
@ -55,10 +53,8 @@ rules:
|
||||
docs-known-limitations:
|
||||
status: exempt
|
||||
comment: no known limitations, yet
|
||||
docs-supported-devices:
|
||||
status: todo
|
||||
comment: review and complete missing ones
|
||||
docs-supported-functions: todo
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
@ -72,9 +68,7 @@ rules:
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow:
|
||||
status: todo
|
||||
comment: PR in progress
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: no known use cases for repair issues or flows, yet
|
||||
|
@ -23,11 +23,24 @@
          "pin": "[%key:component::comelit::config::step::reauth_confirm::data_description::pin%]",
          "type": "The type of your Comelit device."
        }
      },
      "reconfigure": {
        "data": {
          "host": "[%key:common::config_flow::data::host%]",
          "port": "[%key:common::config_flow::data::port%]",
          "pin": "[%key:common::config_flow::data::pin%]"
        },
        "data_description": {
          "host": "[%key:component::comelit::config::step::user::data_description::host%]",
          "port": "[%key:component::comelit::config::step::user::data_description::port%]",
          "pin": "[%key:component::comelit::config::step::reauth_confirm::data_description::pin%]"
        }
      }
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
@ -61,6 +74,18 @@
      "dehumidifier": {
        "name": "Dehumidifier"
      }
    },
    "climate": {
      "thermostat": {
        "state_attributes": {
          "preset_mode": {
            "state": {
              "automatic": "[%key:common::state::auto%]",
              "manual": "[%key:common::state::manual%]"
            }
          }
        }
      }
    }
  },
  "exceptions": {
@ -76,6 +101,9 @@
    "cannot_authenticate": {
      "message": "Error authenticating"
    },
    "cannot_retrieve_data": {
      "message": "Error retrieving data: {error}"
    },
    "update_failed": {
      "message": "Failed to update data: {error}"
    }

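The cannot_retrieve_data and update_failed strings above are referenced by translation keys in the Comelit code elsewhere in this diff. A hedged sketch of how such an error would be raised so that the translated message, including the {error} placeholder, reaches the user:

from homeassistant.exceptions import HomeAssistantError

# "comelit" is the integration domain; the placeholder value is invented.
raise HomeAssistantError(
    translation_domain="comelit",
    translation_key="cannot_retrieve_data",
    translation_placeholders={"error": "timeout"},
)
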
@ -13,6 +13,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity
from .utils import bridge_api_call

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@ -56,6 +57,7 @@ class ComelitSwitchEntity(ComelitBridgeBaseEntity, SwitchEntity):
        if device.type == OTHER:
            self._attr_device_class = SwitchDeviceClass.OUTLET

    @bridge_api_call
    async def _switch_set_state(self, state: int) -> None:
        """Set desired switch state."""
        await self.coordinator.api.set_device_status(

@ -1,9 +1,25 @@
"""Utils for Comelit."""

from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate

from aiocomelit import ComelitSerialBridgeObject
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
from aiohttp import ClientSession, CookieJar

from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import (
    aiohttp_client,
    device_registry as dr,
    entity_registry as er,
)

from .const import _LOGGER, DOMAIN
from .entity import ComelitBridgeBaseEntity


async def async_client_session(hass: HomeAssistant) -> ClientSession:
@ -11,3 +27,89 @@ async def async_client_session(hass: HomeAssistant) -> ClientSession:
    return aiohttp_client.async_create_clientsession(
        hass, verify_ssl=False, cookie_jar=CookieJar(unsafe=True)
    )


def load_api_data(device: ComelitSerialBridgeObject, domain: str) -> list[Any]:
    """Load data from the API."""
    # This function is called when the data is loaded from the API
    if not isinstance(device.val, list):
        raise HomeAssistantError(
            translation_domain=domain, translation_key="invalid_clima_data"
        )
    # CLIMATE has a 2 item tuple:
    # - first for Clima
    # - second for Humidifier
    return device.val[0] if domain == CLIMATE_DOMAIN else device.val[1]


async def cleanup_stale_entity(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    entry_unique_id: str,
    device: ComelitSerialBridgeObject,
) -> None:
    """Cleanup stale entity."""
    entity_reg: er.EntityRegistry = er.async_get(hass)

    identifiers: list[str] = []

    for entry in er.async_entries_for_config_entry(entity_reg, config_entry.entry_id):
        if entry.unique_id == entry_unique_id:
            entry_name = entry.name or entry.original_name
            _LOGGER.info("Removing entity: %s [%s]", entry.entity_id, entry_name)
            entity_reg.async_remove(entry.entity_id)
            identifiers.append(f"{config_entry.entry_id}-{device.type}-{device.index}")

    if len(identifiers) > 0:
        _async_remove_state_config_entry_from_devices(hass, identifiers, config_entry)


def _async_remove_state_config_entry_from_devices(
    hass: HomeAssistant, identifiers: list[str], config_entry: ConfigEntry
) -> None:
    """Remove config entry from device."""

    device_registry = dr.async_get(hass)
    for identifier in identifiers:
        device = device_registry.async_get_device(identifiers={(DOMAIN, identifier)})
        if device:
            _LOGGER.info(
                "Removing config entry %s from device %s",
                config_entry.title,
                device.name,
            )
            device_registry.async_update_device(
                device_id=device.id,
                remove_config_entry_id=config_entry.entry_id,
            )


def bridge_api_call[_T: ComelitBridgeBaseEntity, **_P](
    func: Callable[Concatenate[_T, _P], Awaitable[None]],
) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, None]]:
    """Catch Bridge API call exceptions."""

    @wraps(func)
    async def cmd_wrapper(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> None:
        """Wrap all command methods."""
        try:
            await func(self, *args, **kwargs)
        except CannotConnect as err:
            self.coordinator.last_update_success = False
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="cannot_connect",
                translation_placeholders={"error": repr(err)},
            ) from err
        except CannotRetrieveData as err:
            self.coordinator.last_update_success = False
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="cannot_retrieve_data",
                translation_placeholders={"error": repr(err)},
            ) from err
        except CannotAuthenticate:
            self.coordinator.last_update_success = False
            self.coordinator.config_entry.async_start_reauth(self.hass)

    return cmd_wrapper

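The comments in load_api_data describe device.val as a two-item list: index 0 for the climate data and index 1 for the humidifier data. A small sketch of that selection rule in isolation; the payload values are made up, since the real ComelitSerialBridgeObject contents are not shown in this diff:

from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN


def pick_values(val: list, domain: str) -> list:
    """Apply the same selection rule as load_api_data (sketch only)."""
    return val[0] if domain == CLIMATE_DOMAIN else val[1]


raw_val = [[221, 0, "U", "M", 210], [650, 0, "O", "M", 500]]  # fake [clima, humidifier] payload
assert pick_values(raw_val, CLIMATE_DOMAIN) == raw_val[0]
assert pick_values(raw_val, "humidifier") == raw_val[1]
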
@ -9,10 +9,12 @@ from typing import Any
from homeassistant.components.notify import BaseNotificationService
from homeassistant.const import CONF_COMMAND
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import TemplateError
from homeassistant.helpers.template import Template
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util.process import kill_subprocess

from .const import CONF_COMMAND_TIMEOUT
from .const import CONF_COMMAND_TIMEOUT, LOGGER

_LOGGER = logging.getLogger(__name__)

@ -43,8 +45,31 @@ class CommandLineNotificationService(BaseNotificationService):

    def send_message(self, message: str = "", **kwargs: Any) -> None:
        """Send a message to a command line."""
        command = self.command
        if " " not in command:
            prog = command
            args = None
            args_compiled = None
        else:
            prog, args = command.split(" ", 1)
            args_compiled = Template(args, self.hass)

        rendered_args = None
        if args_compiled:
            args_to_render = {"arguments": args}
            try:
                rendered_args = args_compiled.async_render(args_to_render)
            except TemplateError as ex:
                LOGGER.exception("Error rendering command template: %s", ex)
                return

        if rendered_args != args:
            command = f"{prog} {rendered_args}"

        LOGGER.debug("Running command: %s, with message: %s", command, message)

        with subprocess.Popen(  # noqa: S602 # shell by design
            self.command,
            command,
            universal_newlines=True,
            stdin=subprocess.PIPE,
            close_fds=False,  # required for posix_spawn
@ -56,10 +81,10 @@ class CommandLineNotificationService(BaseNotificationService):
                _LOGGER.error(
                    "Command failed (with return code %s): %s",
                    proc.returncode,
                    self.command,
                    command,
                )
        except subprocess.TimeoutExpired:
            _LOGGER.error("Timeout for command: %s", self.command)
            _LOGGER.error("Timeout for command: %s", command)
            kill_subprocess(proc)
        except subprocess.SubprocessError:
            _LOGGER.error("Error trying to exec command: %s", self.command)
            _LOGGER.error("Error trying to exec command: %s", command)

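The new send_message code renders everything after the first space of the configured command as a template, exposing the raw argument string as the arguments variable. A rough sketch of that step in isolation; the command string is invented and hass is assumed to be the running HomeAssistant instance (async_render must be called from the event loop):

from homeassistant.helpers.template import Template

command = "echo {{ now().year }}"  # hypothetical configured notify command
prog, args = command.split(" ", 1)
rendered_args = Template(args, hass).async_render({"arguments": args})
command = f"{prog} {rendered_args}"  # e.g. "echo 2025", which is what gets executed
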
@ -5,5 +5,5 @@
  "documentation": "https://www.home-assistant.io/integrations/compensation",
  "iot_class": "calculated",
  "quality_scale": "legacy",
  "requirements": ["numpy==2.2.2"]
  "requirements": ["numpy==2.2.6"]
}

@ -2,6 +2,7 @@

from __future__ import annotations

import logging
from typing import Any

import voluptuous as vol
@ -10,18 +11,23 @@ from homeassistant import config_entries
from homeassistant.components import websocket_api
from homeassistant.components.websocket_api import ERR_NOT_FOUND, require_admin
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import (
    config_validation as cv,
    device_registry as dr,
    entity_registry as er,
)
from homeassistant.helpers.entity_component import async_get_entity_suggested_object_id
from homeassistant.helpers.json import json_dumps

_LOGGER = logging.getLogger(__name__)


@callback
def async_setup(hass: HomeAssistant) -> bool:
    """Enable the Entity Registry views."""

    websocket_api.async_register_command(hass, websocket_get_automatic_entity_ids)
    websocket_api.async_register_command(hass, websocket_get_entities)
    websocket_api.async_register_command(hass, websocket_get_entity)
    websocket_api.async_register_command(hass, websocket_list_entities_for_display)
@ -316,3 +322,54 @@ def websocket_remove_entity(

    registry.async_remove(msg["entity_id"])
    connection.send_message(websocket_api.result_message(msg["id"]))


@websocket_api.websocket_command(
    {
        vol.Required("type"): "config/entity_registry/get_automatic_entity_ids",
        vol.Required("entity_ids"): cv.entity_ids,
    }
)
@callback
def websocket_get_automatic_entity_ids(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Return the automatic entity IDs for the given entity IDs.

    This is used to help user reset entity IDs which have been customized by the user.
    """
    registry = er.async_get(hass)

    entity_ids = msg["entity_ids"]
    automatic_entity_ids: dict[str, str | None] = {}
    reserved_entity_ids: set[str] = set()
    for entity_id in entity_ids:
        if not (entry := registry.entities.get(entity_id)):
            automatic_entity_ids[entity_id] = None
            continue
        try:
            suggested = async_get_entity_suggested_object_id(hass, entity_id)
        except HomeAssistantError as err:
            # This is raised if the entity has no object.
            _LOGGER.debug(
                "Unable to get suggested object ID for %s, entity ID: %s (%s)",
                entry.entity_id,
                entity_id,
                err,
            )
            automatic_entity_ids[entity_id] = None
            continue
        suggested_entity_id = registry.async_generate_entity_id(
            entry.domain,
            suggested or f"{entry.platform}_{entry.unique_id}",
            current_entity_id=entity_id,
            reserved_entity_ids=reserved_entity_ids,
        )
        automatic_entity_ids[entity_id] = suggested_entity_id
        reserved_entity_ids.add(suggested_entity_id)

    connection.send_message(
        websocket_api.result_message(msg["id"], automatic_entity_ids)
    )

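For reference, a sketch of the request and result shapes implied by the handler above; the IDs and suggestions are invented:

# Message a WebSocket client would send:
request = {
    "id": 42,
    "type": "config/entity_registry/get_automatic_entity_ids",
    "entity_ids": ["light.my_renamed_light", "sensor.unknown"],
}
# Result payload produced by the handler: each requested entity ID maps to its
# automatically generated ID, or None when no suggestion can be computed.
expected_result = {
    "light.my_renamed_light": "light.kitchen_ceiling",  # hypothetical suggestion
    "sensor.unknown": None,
}
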
@ -203,7 +203,11 @@ def async_get_agent_info(
        name = agent.name
        if not isinstance(name, str):
            name = agent.entity_id
        return AgentInfo(id=agent.entity_id, name=name)
        return AgentInfo(
            id=agent.entity_id,
            name=name,
            supports_streaming=agent.supports_streaming,
        )

    manager = get_agent_manager(hass)

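AgentInfo now carries supports_streaming, so callers can check streaming support before starting a conversation. A hedged sketch, assuming async_get_agent_info is imported from the conversation component as elsewhere in core and that hass is the running instance; the agent ID is illustrative:

from homeassistant.components.conversation import async_get_agent_info

info = async_get_agent_info(hass, "conversation.home_assistant")  # hypothetical agent ID
if info is not None and info.supports_streaming:
    print(f"{info.name} supports streaming responses")
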
Some files were not shown because too many files have changed in this diff.