Mirror of https://github.com/home-assistant/core.git, synced 2025-11-20 16:26:57 +00:00

Compare commits: 2 commits, tibber_dat ... mqtt-suben

| Author | SHA1 | Date |
|---|---|---|
| | 466fb0131e | |
| | d479bba351 | |
@@ -87,7 +87,7 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(conditions|quality_scale|services|triggers)\.yaml|homeassistant/brands/.*\.json|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(quality_scale)\.yaml|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
- id: hassfest-metadata
name: hassfest-metadata
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker
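Because the +/- markers were lost in this rendering, here is a small hedged check (the integration path is hypothetical, not part of the commit) showing what the longer `files` pattern covers: `triggers.yaml` (like `conditions`/`services`) only matches the variant that lists `conditions|quality_scale|services|triggers`.

```python
import re

# The two pre-commit "files" patterns from the hunk above; variable names are mine.
pattern_with_triggers = r"^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(conditions|quality_scale|services|triggers)\.yaml|homeassistant/brands/.*\.json|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$"
pattern_without_triggers = r"^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(quality_scale)\.yaml|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$"

path = "homeassistant/components/foo/triggers.yaml"  # hypothetical path for illustration
print(bool(re.match(pattern_with_triggers, path)))     # True: triggers.yaml is matched
print(bool(re.match(pattern_without_triggers, path)))  # False: only quality_scale.yaml / services.yaml
```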
@@ -579,7 +579,6 @@ homeassistant.components.wiz.*
homeassistant.components.wled.*
homeassistant.components.workday.*
homeassistant.components.worldclock.*
homeassistant.components.xbox.*
homeassistant.components.xiaomi_ble.*
homeassistant.components.yale_smart_alarm.*
homeassistant.components.yalexs_ble.*
@@ -45,7 +45,7 @@ SERVICE_REFRESH_SCHEMA = vol.Schema(
{vol.Optional(CONF_FORCE, default=False): cv.boolean}
)

PLATFORMS = [Platform.SENSOR, Platform.SWITCH, Platform.UPDATE]
PLATFORMS = [Platform.SENSOR, Platform.SWITCH]
type AdGuardConfigEntry = ConfigEntry[AdGuardData]
@@ -1,71 +0,0 @@
"""AdGuard Home Update platform."""

from __future__ import annotations

from datetime import timedelta
from typing import Any

from adguardhome import AdGuardHomeError

from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AdGuardConfigEntry, AdGuardData
from .const import DOMAIN
from .entity import AdGuardHomeEntity

SCAN_INTERVAL = timedelta(seconds=300)
PARALLEL_UPDATES = 1

async def async_setup_entry(
hass: HomeAssistant,
entry: AdGuardConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up AdGuard Home update entity based on a config entry."""
data = entry.runtime_data

if (await data.client.update.update_available()).disabled:
return

async_add_entities([AdGuardHomeUpdate(data, entry)], True)

class AdGuardHomeUpdate(AdGuardHomeEntity, UpdateEntity):
"""Defines an AdGuard Home update."""

_attr_supported_features = UpdateEntityFeature.INSTALL
_attr_name = None

def __init__(
self,
data: AdGuardData,
entry: AdGuardConfigEntry,
) -> None:
"""Initialize AdGuard Home update."""
super().__init__(data, entry)

self._attr_unique_id = "_".join(
[DOMAIN, self.adguard.host, str(self.adguard.port), "update"]
)

async def _adguard_update(self) -> None:
"""Update AdGuard Home entity."""
value = await self.adguard.update.update_available()
self._attr_installed_version = self.data.version
self._attr_latest_version = value.new_version
self._attr_release_summary = value.announcement
self._attr_release_url = value.announcement_url

async def async_install(
self, version: str | None, backup: bool, **kwargs: Any
) -> None:
"""Install latest update."""
try:
await self.adguard.update.begin_update()
except AdGuardHomeError as err:
raise HomeAssistantError(f"Failed to install update: {err}") from err
self.hass.config_entries.async_schedule_reload(self._entry.entry_id)
@@ -392,7 +392,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
type="tool_use",
id=response.content_block.id,
name=response.content_block.name,
input={},
input="",
)
current_tool_args = ""
if response.content_block.name == output_tool:
@@ -459,7 +459,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
type="server_tool_use",
id=response.content_block.id,
name=response.content_block.name,
input={},
input="",
)
current_tool_args = ""
elif isinstance(response.content_block, WebSearchToolResultBlock):
@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/anthropic",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["anthropic==0.73.0"]
"requirements": ["anthropic==0.69.0"]
}
@@ -20,7 +20,7 @@
"bluetooth-adapters==2.1.0",
"bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.4",
"dbus-fast==3.0.0",
"dbus-fast==2.45.0",
"habluetooth==5.7.0"
]
}
@@ -7,7 +7,6 @@
"documentation": "https://www.home-assistant.io/integrations/google_assistant_sdk",
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "gold",
"requirements": ["gassist-text==0.0.14"],
"single_config_entry": true
}
@@ -1,98 +0,0 @@
rules:
# Bronze
action-setup: done
appropriate-polling:
status: exempt
comment: No polling.
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: No entities.
entity-unique-id:
status: exempt
comment: No entities.
has-entity-name:
status: exempt
comment: No entities.
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done

# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable:
status: exempt
comment: No entities.
integration-owner: done
log-when-unavailable:
status: exempt
comment: No entities.
parallel-updates:
status: exempt
comment: No entities to update.
reauthentication-flow: done
test-coverage: done

# Gold
devices:
status: exempt
comment: This integration acts as a service and does not represent physical devices.
diagnostics: done
discovery-update-info:
status: exempt
comment: No discovery.
discovery:
status: exempt
comment: This is a cloud service integration that cannot be discovered locally.
docs-data-update:
status: exempt
comment: No entities to update.
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: No devices.
entity-category:
status: exempt
comment: No entities.
entity-device-class:
status: exempt
comment: No entities.
entity-disabled-by-default:
status: exempt
comment: No entities.
entity-translations:
status: exempt
comment: No entities.
exception-translations: done
icon-translations: done
reconfiguration-flow: done
repair-issues:
status: exempt
comment: No repairs.
stale-devices:
status: exempt
comment: No devices.

# Platinum
async-dependency: todo
inject-websession:
status: exempt
comment: The underlying library uses gRPC, not aiohttp/httpx, for communication.
strict-typing: done
@@ -56,9 +56,6 @@
"init": {
"data": {
"language_code": "Language code"
},
"data_description": {
"language_code": "Language for the Google Assistant SDK requests and responses."
}
}
}
@@ -31,7 +31,6 @@ from .const import DOMAIN
if TYPE_CHECKING:
from . import GoogleSheetsConfigEntry

ADD_CREATED_COLUMN = "add_created_column"
DATA = "data"
DATA_CONFIG_ENTRY = "config_entry"
ROWS = "rows"
@@ -44,7 +43,6 @@ SHEET_SERVICE_SCHEMA = vol.All(
{
vol.Required(DATA_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
vol.Optional(WORKSHEET): cv.string,
vol.Optional(ADD_CREATED_COLUMN, default=True): cv.boolean,
vol.Required(DATA): vol.Any(cv.ensure_list, [dict]),
},
)
@@ -71,11 +69,10 @@ def _append_to_sheet(call: ServiceCall, entry: GoogleSheetsConfigEntry) -> None:

worksheet = sheet.worksheet(call.data.get(WORKSHEET, sheet.sheet1.title))
columns: list[str] = next(iter(worksheet.get_values("A1:ZZ1")), [])
add_created_column = call.data[ADD_CREATED_COLUMN]
now = str(datetime.now())
rows = []
for d in call.data[DATA]:
row_data = ({"created": now} | d) if add_created_column else d
row_data = {"created": now} | d
row = [row_data.get(column, "") for column in columns]
for key, value in row_data.items():
if key not in columns:
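For context, a minimal sketch of how the optional `add_created_column` flag in the hunk above changes the appended row data (the sample dict and flag value are hypothetical, not taken from the commit):

```python
from datetime import datetime

now = str(datetime.now())
add_created_column = False        # the new optional flag; it defaults to True
d = {"hello": "world", "count": 5}  # hypothetical service-call data

# Mirrors the new conditional: the "created" column is only merged in when requested.
row_data = ({"created": now} | d) if add_created_column else d
assert row_data == d  # with the flag off, the data is appended unchanged
```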
@@ -9,11 +9,6 @@ append_sheet:
example: "Sheet1"
selector:
text:
add_created_column:
required: false
default: true
selector:
boolean:
data:
required: true
example: '{"hello": world, "cool": True, "count": 5}'
@@ -45,10 +45,6 @@
"append_sheet": {
"description": "Appends data to a worksheet in Google Sheets.",
"fields": {
"add_created_column": {
"description": "Add a \"created\" column with the current date-time to the appended data.",
"name": "Add created column"
},
"config_entry": {
"description": "The sheet to add data to.",
"name": "Sheet"
@@ -12,7 +12,6 @@ from pyicloud.exceptions import (
PyiCloudFailedLoginException,
PyiCloudNoDevicesException,
PyiCloudServiceNotActivatedException,
PyiCloudServiceUnavailable,
)
from pyicloud.services.findmyiphone import AppleDevice

@@ -131,21 +130,15 @@ class IcloudAccount:
except (
PyiCloudServiceNotActivatedException,
PyiCloudNoDevicesException,
PyiCloudServiceUnavailable,
) as err:
_LOGGER.error("No iCloud device found")
raise ConfigEntryNotReady from err

if user_info is None:
raise ConfigEntryNotReady("No user info found in iCloud devices response")

self._owner_fullname = (
f"{user_info.get('firstName')} {user_info.get('lastName')}"
)
self._owner_fullname = f"{user_info['firstName']} {user_info['lastName']}"

self._family_members_fullname = {}
if user_info.get("membersInfo") is not None:
for prs_id, member in user_info.get("membersInfo").items():
for prs_id, member in user_info["membersInfo"].items():
self._family_members_fullname[prs_id] = (
f"{member['firstName']} {member['lastName']}"
)
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/icloud",
"iot_class": "cloud_polling",
"loggers": ["keyrings.alt", "pyicloud"],
"requirements": ["pyicloud==2.2.0"]
"requirements": ["pyicloud==2.1.0"]
}
@@ -237,23 +237,14 @@ class SettingDataUpdateCoordinator(
"""Implementation of PlenticoreUpdateCoordinator for settings data."""

async def _async_update_data(self) -> Mapping[str, Mapping[str, str]]:
if (client := self._plenticore.client) is None:
client = self._plenticore.client

if not self._fetch or client is None:
return {}

fetch = defaultdict(set)
_LOGGER.debug("Fetching %s for %s", self.name, self._fetch)

for module_id, data_ids in self._fetch.items():
fetch[module_id].update(data_ids)

for module_id, data_id in self.async_contexts():
fetch[module_id].add(data_id)

if not fetch:
return {}

_LOGGER.debug("Fetching %s for %s", self.name, fetch)

return await client.get_setting_values(fetch)
return await client.get_setting_values(self._fetch)

class PlenticoreSelectUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
@@ -34,29 +34,6 @@ async def async_get_config_entry_diagnostics(
},
}

# Add important information how the inverter is configured
string_count_setting = await plenticore.client.get_setting_values(
"devices:local", "Properties:StringCnt"
)
try:
string_count = int(
string_count_setting["devices:local"]["Properties:StringCnt"]
)
except ValueError:
string_count = 0

configuration_settings = await plenticore.client.get_setting_values(
"devices:local",
(
"Properties:StringCnt",
*(f"Properties:String{idx}Features" for idx in range(string_count)),
),
)

data["configuration"] = {
**configuration_settings,
}

device_info = {**plenticore.device_info}
device_info[ATTR_IDENTIFIERS] = REDACTED # contains serial number
data["device"] = device_info
@@ -5,13 +5,12 @@ from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta
import logging
from typing import Any, Final
from typing import Any

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

@@ -67,7 +66,7 @@ async def async_setup_entry(
"""Add kostal plenticore Switch."""
plenticore = entry.runtime_data

entities: list[Entity] = []
entities = []

available_settings_data = await plenticore.client.get_settings()
settings_data_update_coordinator = SettingDataUpdateCoordinator(
@@ -104,57 +103,6 @@ async def async_setup_entry(
)
)

# add shadow management switches for strings which support it
string_count_setting = await plenticore.client.get_setting_values(
"devices:local", "Properties:StringCnt"
)
try:
string_count = int(
string_count_setting["devices:local"]["Properties:StringCnt"]
)
except ValueError:
string_count = 0

dc_strings = tuple(range(string_count))
dc_string_feature_ids = tuple(
PlenticoreShadowMgmtSwitch.DC_STRING_FEATURE_DATA_ID % dc_string
for dc_string in dc_strings
)

dc_string_features = await plenticore.client.get_setting_values(
PlenticoreShadowMgmtSwitch.MODULE_ID,
dc_string_feature_ids,
)

for dc_string, dc_string_feature_id in zip(
dc_strings, dc_string_feature_ids, strict=True
):
try:
dc_string_feature = int(
dc_string_features[PlenticoreShadowMgmtSwitch.MODULE_ID][
dc_string_feature_id
]
)
except ValueError:
dc_string_feature = 0

if dc_string_feature == PlenticoreShadowMgmtSwitch.SHADOW_MANAGEMENT_SUPPORT:
entities.append(
PlenticoreShadowMgmtSwitch(
settings_data_update_coordinator,
dc_string,
entry.entry_id,
entry.title,
plenticore.device_info,
)
)
else:
_LOGGER.debug(
"Skipping shadow management for DC string %d, not supported (Feature: %d)",
dc_string + 1,
dc_string_feature,
)

async_add_entities(entities)

@@ -188,6 +136,7 @@ class PlenticoreDataSwitch(
self.off_value = description.off_value
self.off_label = description.off_label
self._attr_unique_id = f"{entry_id}_{description.module_id}_{description.key}"

self._attr_device_info = device_info

@property
@@ -240,98 +189,3 @@ class PlenticoreDataSwitch(
f"{self.platform_name} {self._name} {self.off_label}"
)
return bool(self.coordinator.data[self.module_id][self.data_id] == self._is_on)

class PlenticoreShadowMgmtSwitch(
CoordinatorEntity[SettingDataUpdateCoordinator], SwitchEntity
):
"""Representation of a Plenticore Switch for shadow management.

The shadow management switch can be controlled for each DC string separately. The DC string is
coded as bit in a single settings value, bit 0 for DC string 1, bit 1 for DC string 2, etc.

Not all DC strings are available for shadow management, for example if one of them is used
for a battery.
"""

_attr_entity_category = EntityCategory.CONFIG
entity_description: SwitchEntityDescription

MODULE_ID: Final = "devices:local"

SHADOW_DATA_ID: Final = "Generator:ShadowMgmt:Enable"
"""Settings id for the bit coded shadow management."""

DC_STRING_FEATURE_DATA_ID: Final = "Properties:String%dFeatures"
"""Settings id pattern for the DC string features."""

SHADOW_MANAGEMENT_SUPPORT: Final = 1
"""Feature value for shadow management support in the DC string features."""

def __init__(
self,
coordinator: SettingDataUpdateCoordinator,
dc_string: int,
entry_id: str,
platform_name: str,
device_info: DeviceInfo,
) -> None:
"""Create a new Switch Entity for Plenticore shadow management."""
super().__init__(coordinator, context=(self.MODULE_ID, self.SHADOW_DATA_ID))

self._mask: Final = 1 << dc_string

self.entity_description = SwitchEntityDescription(
key=f"ShadowMgmt{dc_string}",
name=f"Shadow Management DC string {dc_string + 1}",
entity_registry_enabled_default=False,
)

self.platform_name = platform_name
self._attr_name = f"{platform_name} {self.entity_description.name}"
self._attr_unique_id = (
f"{entry_id}_{self.MODULE_ID}_{self.SHADOW_DATA_ID}_{dc_string}"
)
self._attr_device_info = device_info

@property
def available(self) -> bool:
"""Return if entity is available."""
return (
super().available
and self.coordinator.data is not None
and self.MODULE_ID in self.coordinator.data
and self.SHADOW_DATA_ID in self.coordinator.data[self.MODULE_ID]
)

def _get_shadow_mgmt_value(self) -> int:
"""Return the current shadow management value for all strings as integer."""
try:
return int(self.coordinator.data[self.MODULE_ID][self.SHADOW_DATA_ID])
except ValueError:
return 0

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn shadow management on."""
shadow_mgmt_value = self._get_shadow_mgmt_value()
shadow_mgmt_value |= self._mask

if await self.coordinator.async_write_data(
self.MODULE_ID, {self.SHADOW_DATA_ID: str(shadow_mgmt_value)}
):
await self.coordinator.async_request_refresh()

async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn shadow management off."""
shadow_mgmt_value = self._get_shadow_mgmt_value()
shadow_mgmt_value &= ~self._mask

if await self.coordinator.async_write_data(
self.MODULE_ID, {self.SHADOW_DATA_ID: str(shadow_mgmt_value)}
):
await self.coordinator.async_request_refresh()

@property
def is_on(self) -> bool:
"""Return true if shadow management is on."""
return (self._get_shadow_mgmt_value() & self._mask) != 0
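As an aside, a minimal sketch of the bit coding described in the class docstring above (the concrete values are assumed for illustration, not read from a real inverter): bit 0 stands for DC string 1, bit 1 for DC string 2, and so on, all packed into a single settings value.

```python
shadow_mgmt_value = 0b01  # assumed current value: enabled for DC string 1 only

dc_string = 1             # zero-based index, i.e. DC string 2
mask = 1 << dc_string     # same construction as self._mask in the class above

shadow_mgmt_value |= mask   # turn_on for DC string 2  -> 0b11
shadow_mgmt_value &= ~mask  # turn_off for DC string 2 -> back to 0b01

is_on = (shadow_mgmt_value & mask) != 0  # False after the turn_off step
```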
@@ -1,7 +1,6 @@
"""Support for LCN binary sensors."""

from collections.abc import Iterable
from datetime import timedelta
from functools import partial

import pypck
@@ -20,7 +19,6 @@ from .entity import LcnEntity
from .helpers import InputType, LcnConfigEntry

PARALLEL_UPDATES = 0
SCAN_INTERVAL = timedelta(minutes=1)

def add_lcn_entities(
@@ -71,11 +69,21 @@ class LcnBinarySensor(LcnEntity, BinarySensorEntity):
config[CONF_DOMAIN_DATA][CONF_SOURCE]
]

async def async_update(self) -> None:
"""Update the state of the entity."""
await self.device_connection.request_status_binary_sensors(
SCAN_INTERVAL.seconds
)
async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(
self.bin_sensor_port
)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(
self.bin_sensor_port
)

def input_received(self, input_obj: InputType) -> None:
"""Set sensor value when LCN input object (command) is received."""
@@ -1,8 +1,6 @@
"""Support for LCN climate control."""

import asyncio
from collections.abc import Iterable
from datetime import timedelta
from functools import partial
from typing import Any, cast

@@ -38,7 +36,6 @@ from .entity import LcnEntity
from .helpers import InputType, LcnConfigEntry

PARALLEL_UPDATES = 0
SCAN_INTERVAL = timedelta(minutes=1)

def add_lcn_entities(
@@ -113,6 +110,20 @@ class LcnClimate(LcnEntity, ClimateEntity):
ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON
)

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(self.variable)
await self.device_connection.activate_status_request_handler(self.setpoint)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(self.variable)
await self.device_connection.cancel_status_request_handler(self.setpoint)

@property
def temperature_unit(self) -> str:
"""Return the unit of measurement."""
@@ -181,17 +192,6 @@ class LcnClimate(LcnEntity, ClimateEntity):
self._target_temperature = temperature
self.async_write_ha_state()

async def async_update(self) -> None:
"""Update the state of the entity."""
await asyncio.gather(
self.device_connection.request_status_variable(
self.variable, SCAN_INTERVAL.seconds
),
self.device_connection.request_status_variable(
self.setpoint, SCAN_INTERVAL.seconds
),
)

def input_received(self, input_obj: InputType) -> None:
"""Set temperature value when LCN input object is received."""
if not isinstance(input_obj, pypck.inputs.ModStatusVar):
@@ -1,8 +1,6 @@
"""Support for LCN covers."""

import asyncio
from collections.abc import Iterable
from datetime import timedelta
from functools import partial
from typing import Any

@@ -29,7 +27,6 @@ from .entity import LcnEntity
from .helpers import InputType, LcnConfigEntry

PARALLEL_UPDATES = 0
SCAN_INTERVAL = timedelta(minutes=1)

def add_lcn_entities(
@@ -76,7 +73,7 @@ async def async_setup_entry(
class LcnOutputsCover(LcnEntity, CoverEntity):
"""Representation of a LCN cover connected to output ports."""

_attr_is_closed = True
_attr_is_closed = False
_attr_is_closing = False
_attr_is_opening = False
_attr_assumed_state = True
@@ -96,6 +93,28 @@ class LcnOutputsCover(LcnEntity, CoverEntity):
else:
self.reverse_time = None

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(
pypck.lcn_defs.OutputPort["OUTPUTUP"]
)
await self.device_connection.activate_status_request_handler(
pypck.lcn_defs.OutputPort["OUTPUTDOWN"]
)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(
pypck.lcn_defs.OutputPort["OUTPUTUP"]
)
await self.device_connection.cancel_status_request_handler(
pypck.lcn_defs.OutputPort["OUTPUTDOWN"]
)

async def async_close_cover(self, **kwargs: Any) -> None:
"""Close the cover."""
state = pypck.lcn_defs.MotorStateModifier.DOWN
@@ -128,18 +147,6 @@ class LcnOutputsCover(LcnEntity, CoverEntity):
self._attr_is_opening = False
self.async_write_ha_state()

async def async_update(self) -> None:
"""Update the state of the entity."""
if not self.device_connection.is_group:
await asyncio.gather(
self.device_connection.request_status_output(
pypck.lcn_defs.OutputPort["OUTPUTUP"], SCAN_INTERVAL.seconds
),
self.device_connection.request_status_output(
pypck.lcn_defs.OutputPort["OUTPUTDOWN"], SCAN_INTERVAL.seconds
),
)

def input_received(self, input_obj: InputType) -> None:
"""Set cover states when LCN input object (command) is received."""
if (
@@ -168,7 +175,7 @@ class LcnOutputsCover(LcnEntity, CoverEntity):
class LcnRelayCover(LcnEntity, CoverEntity):
"""Representation of a LCN cover connected to relays."""

_attr_is_closed = True
_attr_is_closed = False
_attr_is_closing = False
_attr_is_opening = False
_attr_assumed_state = True
@@ -199,6 +206,20 @@ class LcnRelayCover(LcnEntity, CoverEntity):
self._is_closing = False
self._is_opening = False

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(
self.motor, self.positioning_mode
)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(self.motor)

async def async_close_cover(self, **kwargs: Any) -> None:
"""Close the cover."""
if not await self.device_connection.control_motor_relays(
@@ -253,17 +274,6 @@ class LcnRelayCover(LcnEntity, CoverEntity):

self.async_write_ha_state()

async def async_update(self) -> None:
"""Update the state of the entity."""
coros = [self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)]
if self.positioning_mode == pypck.lcn_defs.MotorPositioningMode.BS4:
coros.append(
self.device_connection.request_status_motor_position(
self.motor, self.positioning_mode, SCAN_INTERVAL.seconds
)
)
await asyncio.gather(*coros)

def input_received(self, input_obj: InputType) -> None:
"""Set cover states when LCN input object (command) is received."""
if isinstance(input_obj, pypck.inputs.ModStatusRelays):
@@ -22,6 +22,7 @@ from .helpers import (
class LcnEntity(Entity):
"""Parent class for all entities associated with the LCN component."""

_attr_should_poll = False
_attr_has_entity_name = True
device_connection: DeviceConnectionType

@@ -56,24 +57,15 @@ class LcnEntity(Entity):
).lower(),
)

@property
def should_poll(self) -> bool:
"""Groups may not poll for a status."""
return not self.device_connection.is_group

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
self.device_connection = get_device_connection(
self.hass, self.config[CONF_ADDRESS], self.config_entry
)
if self.device_connection.is_group:
return

self._unregister_for_inputs = self.device_connection.register_for_inputs(
self.input_received
)

self.schedule_update_ha_state(force_refresh=True)
if not self.device_connection.is_group:
self._unregister_for_inputs = self.device_connection.register_for_inputs(
self.input_received
)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
@@ -251,19 +251,13 @@ async def async_update_device_config(
"""Fill missing values in device_config with infos from LCN bus."""
# fetch serial info if device is module
if not (is_group := device_config[CONF_ADDRESS][2]): # is module
await device_connection.serials_known()
await device_connection.serial_known
if device_config[CONF_HARDWARE_SERIAL] == -1:
device_config[CONF_HARDWARE_SERIAL] = (
device_connection.serials.hardware_serial
)
device_config[CONF_HARDWARE_SERIAL] = device_connection.hardware_serial
if device_config[CONF_SOFTWARE_SERIAL] == -1:
device_config[CONF_SOFTWARE_SERIAL] = (
device_connection.serials.software_serial
)
device_config[CONF_SOFTWARE_SERIAL] = device_connection.software_serial
if device_config[CONF_HARDWARE_TYPE] == -1:
device_config[CONF_HARDWARE_TYPE] = (
device_connection.serials.hardware_type.value
)
device_config[CONF_HARDWARE_TYPE] = device_connection.hardware_type.value

# fetch name if device is module
if device_config[CONF_NAME] != "":
@@ -1,7 +1,6 @@
"""Support for LCN lights."""

from collections.abc import Iterable
from datetime import timedelta
from functools import partial
from typing import Any

@@ -34,7 +33,6 @@ from .helpers import InputType, LcnConfigEntry
BRIGHTNESS_SCALE = (1, 100)

PARALLEL_UPDATES = 0
SCAN_INTERVAL = timedelta(minutes=1)

def add_lcn_entities(
@@ -102,6 +100,18 @@ class LcnOutputLight(LcnEntity, LightEntity):
self._attr_color_mode = ColorMode.ONOFF
self._attr_supported_color_modes = {self._attr_color_mode}

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(self.output)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(self.output)

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
if ATTR_TRANSITION in kwargs:
@@ -147,12 +157,6 @@ class LcnOutputLight(LcnEntity, LightEntity):
self._attr_is_on = False
self.async_write_ha_state()

async def async_update(self) -> None:
"""Update the state of the entity."""
await self.device_connection.request_status_output(
self.output, SCAN_INTERVAL.seconds
)

def input_received(self, input_obj: InputType) -> None:
"""Set light state when LCN input object (command) is received."""
if (
@@ -180,6 +184,18 @@ class LcnRelayLight(LcnEntity, LightEntity):

self.output = pypck.lcn_defs.RelayPort[config[CONF_DOMAIN_DATA][CONF_OUTPUT]]

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(self.output)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(self.output)

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
states = [pypck.lcn_defs.RelayStateModifier.NOCHANGE] * 8
@@ -198,10 +214,6 @@ class LcnRelayLight(LcnEntity, LightEntity):
self._attr_is_on = False
self.async_write_ha_state()

async def async_update(self) -> None:
"""Update the state of the entity."""
await self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)

def input_received(self, input_obj: InputType) -> None:
"""Set light state when LCN input object (command) is received."""
if not isinstance(input_obj, pypck.inputs.ModStatusRelays):
@@ -6,8 +6,8 @@
"config_flow": true,
"dependencies": ["http", "websocket_api"],
"documentation": "https://www.home-assistant.io/integrations/lcn",
"iot_class": "local_polling",
"iot_class": "local_push",
"loggers": ["pypck"],
"quality_scale": "bronze",
"requirements": ["pypck==0.9.2", "lcn-frontend==0.2.7"]
"requirements": ["pypck==0.8.12", "lcn-frontend==0.2.7"]
}
@@ -1,7 +1,6 @@
"""Support for LCN sensors."""

from collections.abc import Iterable
from datetime import timedelta
from functools import partial
from itertools import chain

@@ -41,8 +40,6 @@ from .entity import LcnEntity
from .helpers import InputType, LcnConfigEntry

PARALLEL_UPDATES = 0
SCAN_INTERVAL = timedelta(minutes=1)

DEVICE_CLASS_MAPPING = {
pypck.lcn_defs.VarUnit.CELSIUS: SensorDeviceClass.TEMPERATURE,
@@ -131,11 +128,17 @@ class LcnVariableSensor(LcnEntity, SensorEntity):
)
self._attr_device_class = DEVICE_CLASS_MAPPING.get(self.unit)

async def async_update(self) -> None:
"""Update the state of the entity."""
await self.device_connection.request_status_variable(
self.variable, SCAN_INTERVAL.seconds
)
async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(self.variable)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(self.variable)

def input_received(self, input_obj: InputType) -> None:
"""Set sensor value when LCN input object (command) is received."""
@@ -167,11 +170,17 @@ class LcnLedLogicSensor(LcnEntity, SensorEntity):
config[CONF_DOMAIN_DATA][CONF_SOURCE]
]

async def async_update(self) -> None:
"""Update the state of the entity."""
await self.device_connection.request_status_led_and_logic_ops(
SCAN_INTERVAL.seconds
)
async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(self.source)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(self.source)

def input_received(self, input_obj: InputType) -> None:
"""Set sensor value when LCN input object (command) is received."""
@@ -380,6 +380,9 @@ class LockKeys(LcnServiceCall):
else:
await device_connection.lock_keys(table_id, states)

handler = device_connection.status_requests_handler
await handler.request_status_locked_keys_timeout()

class DynText(LcnServiceCall):
"""Send dynamic text to LCN-GTxD displays."""
@@ -1,7 +1,6 @@
"""Support for LCN switches."""

from collections.abc import Iterable
from datetime import timedelta
from functools import partial
from typing import Any

@@ -18,7 +17,6 @@ from .entity import LcnEntity
from .helpers import InputType, LcnConfigEntry

PARALLEL_UPDATES = 0
SCAN_INTERVAL = timedelta(minutes=1)

def add_lcn_switch_entities(
@@ -79,6 +77,18 @@ class LcnOutputSwitch(LcnEntity, SwitchEntity):

self.output = pypck.lcn_defs.OutputPort[config[CONF_DOMAIN_DATA][CONF_OUTPUT]]

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(self.output)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(self.output)

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
if not await self.device_connection.dim_output(self.output.value, 100, 0):
@@ -93,12 +103,6 @@ class LcnOutputSwitch(LcnEntity, SwitchEntity):
self._attr_is_on = False
self.async_write_ha_state()

async def async_update(self) -> None:
"""Update the state of the entity."""
await self.device_connection.request_status_output(
self.output, SCAN_INTERVAL.seconds
)

def input_received(self, input_obj: InputType) -> None:
"""Set switch state when LCN input object (command) is received."""
if (
@@ -122,6 +126,18 @@ class LcnRelaySwitch(LcnEntity, SwitchEntity):

self.output = pypck.lcn_defs.RelayPort[config[CONF_DOMAIN_DATA][CONF_OUTPUT]]

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(self.output)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(self.output)

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
states = [pypck.lcn_defs.RelayStateModifier.NOCHANGE] * 8
@@ -140,10 +156,6 @@ class LcnRelaySwitch(LcnEntity, SwitchEntity):
self._attr_is_on = False
self.async_write_ha_state()

async def async_update(self) -> None:
"""Update the state of the entity."""
await self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)

def input_received(self, input_obj: InputType) -> None:
"""Set switch state when LCN input object (command) is received."""
if not isinstance(input_obj, pypck.inputs.ModStatusRelays):
@@ -167,6 +179,22 @@ class LcnRegulatorLockSwitch(LcnEntity, SwitchEntity):
]
self.reg_id = pypck.lcn_defs.Var.to_set_point_id(self.setpoint_variable)

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(
self.setpoint_variable
)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(
self.setpoint_variable
)

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
if not await self.device_connection.lock_regulator(self.reg_id, True):
@@ -181,12 +209,6 @@ class LcnRegulatorLockSwitch(LcnEntity, SwitchEntity):
self._attr_is_on = False
self.async_write_ha_state()

async def async_update(self) -> None:
"""Update the state of the entity."""
await self.device_connection.request_status_variable(
self.setpoint_variable, SCAN_INTERVAL.seconds
)

def input_received(self, input_obj: InputType) -> None:
"""Set switch state when LCN input object (command) is received."""
if (
@@ -212,6 +234,18 @@ class LcnKeyLockSwitch(LcnEntity, SwitchEntity):
self.table_id = ord(self.key.name[0]) - 65
self.key_id = int(self.key.name[1]) - 1

async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if not self.device_connection.is_group:
await self.device_connection.activate_status_request_handler(self.key)

async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""
await super().async_will_remove_from_hass()
if not self.device_connection.is_group:
await self.device_connection.cancel_status_request_handler(self.key)

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
states = [pypck.lcn_defs.KeyLockStateModifier.NOCHANGE] * 8
@@ -234,10 +268,6 @@ class LcnKeyLockSwitch(LcnEntity, SwitchEntity):
self._attr_is_on = False
self.async_write_ha_state()

async def async_update(self) -> None:
"""Update the state of the entity."""
await self.device_connection.request_status_locked_keys(SCAN_INTERVAL.seconds)

def input_received(self, input_obj: InputType) -> None:
"""Set switch state when LCN input object (command) is received."""
if (
@@ -239,6 +239,7 @@ from .const import (
CONF_OSCILLATION_COMMAND_TOPIC,
CONF_OSCILLATION_STATE_TOPIC,
CONF_OSCILLATION_VALUE_TEMPLATE,
CONF_PATTERN,
CONF_PAYLOAD_ARM_AWAY,
CONF_PAYLOAD_ARM_CUSTOM_BYPASS,
CONF_PAYLOAD_ARM_HOME,
@@ -465,6 +466,7 @@ SUBENTRY_PLATFORMS = [
Platform.SENSOR,
Platform.SIREN,
Platform.SWITCH,
Platform.TEXT,
]

_CODE_VALIDATION_MODE = {
@@ -819,6 +821,16 @@ TEMPERATURE_UNIT_SELECTOR = SelectSelector(
mode=SelectSelectorMode.DROPDOWN,
)
)
TEXT_MODE_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=[TextSelectorType.TEXT.value, TextSelectorType.PASSWORD.value],
mode=SelectSelectorMode.DROPDOWN,
translation_key="text_mode",
)
)
TEXT_SIZE_SELECTOR = NumberSelector(
NumberSelectorConfig(min=0, max=255, step=1, mode=NumberSelectorMode.BOX)
)

@callback
@@ -1151,6 +1163,22 @@ def validate_sensor_platform_config(
return errors

@callback
def validate_text_platform_config(
config: dict[str, Any],
) -> dict[str, str]:
"""Validate the text entity options."""
errors: dict[str, str] = {}
if (
CONF_MIN in config
and CONF_MAX in config
and config[CONF_MIN] > config[CONF_MAX]
):
errors["text_advanced_settings"] = "max_below_min"

return errors

ENTITY_CONFIG_VALIDATOR: dict[
str,
Callable[[dict[str, Any]], dict[str, str]] | None,
@@ -1170,6 +1198,7 @@ ENTITY_CONFIG_VALIDATOR: dict[
Platform.SENSOR: validate_sensor_platform_config,
Platform.SIREN: None,
Platform.SWITCH: None,
Platform.TEXT: validate_text_platform_config,
}

@@ -1430,6 +1459,7 @@ PLATFORM_ENTITY_FIELDS: dict[Platform, dict[str, PlatformField]] = {
selector=SWITCH_DEVICE_CLASS_SELECTOR, required=False
),
},
Platform.TEXT: {},
}
PLATFORM_MQTT_FIELDS: dict[Platform, dict[str, PlatformField]] = {
Platform.ALARM_CONTROL_PANEL: {
@@ -3298,6 +3328,58 @@ PLATFORM_MQTT_FIELDS: dict[Platform, dict[str, PlatformField]] = {
CONF_RETAIN: PlatformField(selector=BOOLEAN_SELECTOR, required=False),
CONF_OPTIMISTIC: PlatformField(selector=BOOLEAN_SELECTOR, required=False),
},
Platform.TEXT: {
CONF_COMMAND_TOPIC: PlatformField(
selector=TEXT_SELECTOR,
required=True,
validator=valid_publish_topic,
error="invalid_publish_topic",
),
CONF_COMMAND_TEMPLATE: PlatformField(
selector=TEMPLATE_SELECTOR,
required=False,
validator=validate(cv.template),
error="invalid_template",
),
CONF_STATE_TOPIC: PlatformField(
selector=TEXT_SELECTOR,
required=False,
validator=valid_subscribe_topic,
error="invalid_subscribe_topic",
),
CONF_VALUE_TEMPLATE: PlatformField(
selector=TEMPLATE_SELECTOR,
required=False,
validator=validate(cv.template),
error="invalid_template",
),
CONF_RETAIN: PlatformField(selector=BOOLEAN_SELECTOR, required=False),
CONF_MIN: PlatformField(
selector=TEXT_SIZE_SELECTOR,
required=True,
default=0,
section="text_advanced_settings",
),
CONF_MAX: PlatformField(
selector=TEXT_SIZE_SELECTOR,
required=True,
default=255,
section="text_advanced_settings",
),
CONF_MODE: PlatformField(
selector=TEXT_MODE_SELECTOR,
required=True,
default=TextSelectorType.TEXT.value,
section="text_advanced_settings",
),
CONF_PATTERN: PlatformField(
selector=TEXT_SELECTOR,
required=False,
validator=validate(cv.is_regex),
error="invalid_regular_expression",
section="text_advanced_settings",
),
},
}
MQTT_DEVICE_PLATFORM_FIELDS = {
ATTR_NAME: PlatformField(selector=TEXT_SELECTOR, required=True),
@@ -4237,8 +4319,7 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow):
return self.async_show_form(
step_id="entity",
data_schema=data_schema,
description_placeholders=TRANSLATION_DESCRIPTION_PLACEHOLDERS
| {
description_placeholders={
"mqtt_device": device_name,
"entity_name_label": entity_name_label,
"platform_label": platform_label,
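A minimal sketch of the `validate_text_platform_config` check added in the hunks above (plain string keys and sample values stand in for the real `CONF_MIN`/`CONF_MAX` constants and subentry options; this is illustration only):

```python
# Hypothetical text subentry options where the minimum exceeds the maximum.
config = {"min": 10, "max": 5}

errors: dict[str, str] = {}
# Same condition as in the validator: flag the advanced-settings section.
if "min" in config and "max" in config and config["min"] > config["max"]:
    errors["text_advanced_settings"] = "max_below_min"

print(errors)  # {'text_advanced_settings': 'max_below_min'}
```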
@@ -138,6 +138,7 @@ CONF_OSCILLATION_COMMAND_TOPIC = "oscillation_command_topic"
CONF_OSCILLATION_COMMAND_TEMPLATE = "oscillation_command_template"
CONF_OSCILLATION_STATE_TOPIC = "oscillation_state_topic"
CONF_OSCILLATION_VALUE_TEMPLATE = "oscillation_value_template"
CONF_PATTERN = "pattern"
CONF_PAYLOAD_ARM_AWAY = "payload_arm_away"
CONF_PAYLOAD_ARM_CUSTOM_BYPASS = "payload_arm_custom_bypass"
CONF_PAYLOAD_ARM_HOME = "payload_arm_home"
@@ -970,6 +970,21 @@
"temperature_state_topic": "The MQTT topic to subscribe for changes of the target temperature. [Learn more.]({url}#temperature_state_topic)"
},
"name": "Target temperature settings"
},
"text_advanced_settings": {
"data": {
"max": "Maximum length",
"min": "Minimum length",
"mode": "Mode",
"pattern": "Pattern"
},
"data_description": {
"max": "Maximum length of the text input",
"min": "Minimum length of the text input",
"mode": "Mode of the text input",
"pattern": "A valid regex pattern"
},
"name": "Advanced text settings"
}
},
"title": "Configure MQTT device \"{mqtt_device}\""
@@ -1387,7 +1402,8 @@
"select": "[%key:component::select::title%]",
"sensor": "[%key:component::sensor::title%]",
"siren": "[%key:component::siren::title%]",
"switch": "[%key:component::switch::title%]"
"switch": "[%key:component::switch::title%]",
"text": "[%key:component::text::title%]"
}
},
"set_ca_cert": {
@@ -1424,6 +1440,12 @@
"none": "No target temperature",
"single": "Single target temperature"
}
},
"text_mode": {
"options": {
"password": "[%key:common::config_flow::data::password%]",
"text": "[%key:component::text::entity_component::_::state_attributes::mode::state::text%]"
}
}
},
"services": {
@@ -27,7 +27,14 @@ from homeassistant.helpers.typing import ConfigType, VolSchemaType

from . import subscription
from .config import MQTT_RW_SCHEMA
from .const import CONF_COMMAND_TEMPLATE, CONF_COMMAND_TOPIC, CONF_STATE_TOPIC
from .const import (
CONF_COMMAND_TEMPLATE,
CONF_COMMAND_TOPIC,
CONF_MAX,
CONF_MIN,
CONF_PATTERN,
CONF_STATE_TOPIC,
)
from .entity import MqttEntity, async_setup_entity_entry_helper
from .models import (
MqttCommandTemplate,
@@ -42,12 +49,7 @@ _LOGGER = logging.getLogger(__name__)

PARALLEL_UPDATES = 0

CONF_MAX = "max"
CONF_MIN = "min"
CONF_PATTERN = "pattern"

DEFAULT_NAME = "MQTT Text"
DEFAULT_PAYLOAD_RESET = "None"

MQTT_TEXT_ATTRIBUTES_BLOCKED = frozenset(
{
@@ -19,5 +19,5 @@
"documentation": "https://www.home-assistant.io/integrations/nest",
"iot_class": "cloud_push",
"loggers": ["google_nest_sdm"],
"requirements": ["google-nest-sdm==9.0.1"]
"requirements": ["google-nest-sdm==7.1.4"]
}
@@ -1,6 +1,8 @@
rules:
# Bronze
config-flow: done
config-flow:
status: todo
comment: Some fields are missing a data_description
brands: done
dependency-transparency: done
common-modules:
@@ -34,9 +34,6 @@
"data": {
"cloud_project_id": "Google Cloud Project ID"
},
"data_description": {
"cloud_project_id": "The Google Cloud Project ID which can be obtained from the Cloud Console"
},
"description": "Enter the Cloud Project ID below e.g. *example-project-12345*. See the [Google Cloud Console]({cloud_console_url}) or the documentation for [more info]({more_info_url}).",
"title": "Nest: Enter Cloud Project ID"
},
@@ -48,9 +45,6 @@
"data": {
"project_id": "Device Access Project ID"
},
"data_description": {
"project_id": "The Device Access Project ID which can be obtained from the Device Access Console"
},
"description": "Create a Nest Device Access project which **requires paying Google a US $5 fee** to set up.\n1. Go to the [Device Access Console]({device_access_console_url}), and through the payment flow.\n1. Select on **Create project**\n1. Give your Device Access project a name and select **Next**.\n1. Enter your OAuth Client ID\n1. Skip enabling events for now and select **Create project**.\n\nEnter your Device Access Project ID below ([more info]({more_info_url})).",
"title": "Nest: Create a Device Access Project"
},
@@ -70,9 +64,6 @@
"data": {
"subscription_name": "Pub/Sub subscription name"
},
"data_description": {
"subscription_name": "The Pub/Sub subscription name to receive Nest device updates"
},
"description": "Home Assistant receives realtime Nest device updates with a Cloud Pub/Sub subscription for topic `{topic}`.\n\nSelect an existing subscription below if one already exists, or the next step will create a new one for you. See the integration documentation for [more info]({more_info_url}).",
"title": "Configure Cloud Pub/Sub subscription"
},
@@ -80,9 +71,6 @@
"data": {
"topic_name": "Pub/Sub topic name"
},
"data_description": {
"topic_name": "The Pub/Sub topic name configured in the Device Access Console"
},
"description": "Nest devices publish updates on a Cloud Pub/Sub topic. You can select an existing topic if one exists, or choose to create a new topic and the next step will create it for you with the necessary permissions. See the integration documentation for [more info]({more_info_url}).",
"title": "Configure Cloud Pub/Sub topic"
},
@@ -37,7 +37,6 @@ SELECT_TYPES = (
PlugwiseSelectEntityDescription(
key=SELECT_SCHEDULE,
translation_key=SELECT_SCHEDULE,
entity_category=EntityCategory.CONFIG,
options_key="available_schedules",
),
PlugwiseSelectEntityDescription(
@@ -48,6 +48,7 @@ SENSORS: tuple[PlugwiseSensorEntityDescription, ...] = (
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
),
PlugwiseSensorEntityDescription(
key="setpoint_high",
@@ -55,6 +56,7 @@ SENSORS: tuple[PlugwiseSensorEntityDescription, ...] = (
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
),
PlugwiseSensorEntityDescription(
key="setpoint_low",
@@ -62,11 +64,13 @@ SENSORS: tuple[PlugwiseSensorEntityDescription, ...] = (
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
),
PlugwiseSensorEntityDescription(
key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
PlugwiseSensorEntityDescription(
@@ -90,7 +94,6 @@ SENSORS: tuple[PlugwiseSensorEntityDescription, ...] = (
translation_key="outdoor_temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
PlugwiseSensorEntityDescription(
@@ -349,8 +352,8 @@ SENSORS: tuple[PlugwiseSensorEntityDescription, ...] = (
key="illuminance",
native_unit_of_measurement=LIGHT_LUX,
device_class=SensorDeviceClass.ILLUMINANCE,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
),
PlugwiseSensorEntityDescription(
key="modulation_level",
@@ -362,8 +365,8 @@ SENSORS: tuple[PlugwiseSensorEntityDescription, ...] = (
PlugwiseSensorEntityDescription(
key="valve_position",
translation_key="valve_position",
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
PlugwiseSensorEntityDescription(
@@ -222,13 +222,17 @@ class ReolinkHost:
|
||||
enable_onvif = None
|
||||
enable_rtmp = None
|
||||
|
||||
if not self._api.rtsp_enabled and self._api.supported(None, "RTSP"):
|
||||
if not self._api.rtsp_enabled and not self._api.baichuan_only:
|
||||
_LOGGER.debug(
|
||||
"RTSP is disabled on %s, trying to enable it", self._api.nvr_name
|
||||
)
|
||||
enable_rtsp = True
|
||||
|
||||
if not self._api.onvif_enabled and onvif_supported:
|
||||
if (
|
||||
not self._api.onvif_enabled
|
||||
and onvif_supported
|
||||
and not self._api.baichuan_only
|
||||
):
|
||||
_LOGGER.debug(
|
||||
"ONVIF is disabled on %s, trying to enable it", self._api.nvr_name
|
||||
)
|
||||
|
||||
@@ -10,7 +10,6 @@ from typing import Any
|
||||
import aiohttp
|
||||
from aiohttp import hdrs
|
||||
import voluptuous as vol
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_AUTHENTICATION,
|
||||
@@ -52,7 +51,6 @@ SUPPORT_REST_METHODS = ["get", "patch", "post", "put", "delete"]
|
||||
|
||||
CONF_CONTENT_TYPE = "content_type"
|
||||
CONF_INSECURE_CIPHER = "insecure_cipher"
|
||||
CONF_SKIP_URL_ENCODING = "skip_url_encoding"
|
||||
|
||||
COMMAND_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -71,7 +69,6 @@ COMMAND_SCHEMA = vol.Schema(
|
||||
vol.Optional(CONF_CONTENT_TYPE): cv.string,
|
||||
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,
|
||||
vol.Optional(CONF_INSECURE_CIPHER, default=False): cv.boolean,
|
||||
vol.Optional(CONF_SKIP_URL_ENCODING, default=False): cv.boolean,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -116,7 +113,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
method = command_config[CONF_METHOD]
|
||||
|
||||
template_url = command_config[CONF_URL]
|
||||
skip_url_encoding = command_config[CONF_SKIP_URL_ENCODING]
|
||||
|
||||
auth = None
|
||||
digest_middleware = None
|
||||
@@ -183,7 +179,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
request_kwargs["middlewares"] = (digest_middleware,)
|
||||
|
||||
async with getattr(websession, method)(
|
||||
URL(request_url, encoded=skip_url_encoding),
|
||||
request_url,
|
||||
**request_kwargs,
|
||||
) as response:
|
||||
if response.status < HTTPStatus.BAD_REQUEST:
|
||||
|
||||
@@ -7,23 +7,23 @@ rules:
|
||||
status: exempt
|
||||
comment: This integration does not poll.
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
common-modules: todo
|
||||
config-flow-test-coverage: todo
|
||||
config-flow: todo
|
||||
dependency-transparency: todo
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: This integration does not provide any service actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
docs-high-level-description: todo
|
||||
docs-installation-instructions: todo
|
||||
docs-removal-instructions: todo
|
||||
entity-event-setup: todo
|
||||
entity-unique-id: todo
|
||||
has-entity-name: todo
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
unique-config-entry: todo
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
|
||||
@@ -5,7 +5,6 @@ from __future__ import annotations
|
||||
from dataclasses import dataclass
|
||||
from typing import Final, cast
|
||||
|
||||
from aioshelly.block_device import Block
|
||||
from aioshelly.const import RPC_GENERATIONS
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
@@ -17,11 +16,10 @@ from homeassistant.components.binary_sensor import (
|
||||
from homeassistant.const import STATE_ON, EntityCategory
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.entity_registry import RegistryEntry
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
|
||||
from .const import CONF_SLEEP_PERIOD, MODEL_FRANKEVER_WATER_VALVE, ROLE_GENERIC
|
||||
from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator
|
||||
from .coordinator import ShellyConfigEntry, ShellyRpcCoordinator
|
||||
from .entity import (
|
||||
BlockEntityDescription,
|
||||
RestEntityDescription,
|
||||
@@ -39,10 +37,6 @@ from .utils import (
|
||||
async_remove_orphaned_entities,
|
||||
get_blu_trv_device_info,
|
||||
get_device_entry_gen,
|
||||
get_entity_translation_attributes,
|
||||
get_rpc_channel_name,
|
||||
get_rpc_custom_name,
|
||||
get_rpc_key,
|
||||
is_block_momentary_input,
|
||||
is_rpc_momentary_input,
|
||||
is_view_for_platform,
|
||||
@@ -73,44 +67,6 @@ class RpcBinarySensor(ShellyRpcAttributeEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: RpcBinarySensorDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ShellyRpcCoordinator,
|
||||
key: str,
|
||||
attribute: str,
|
||||
description: RpcBinarySensorDescription,
|
||||
) -> None:
|
||||
"""Initialize sensor."""
|
||||
super().__init__(coordinator, key, attribute, description)
|
||||
|
||||
if hasattr(self, "_attr_name") and description.role != ROLE_GENERIC:
|
||||
if not description.role and description.key == "input":
|
||||
_, component, component_id = get_rpc_key(key)
|
||||
if not get_rpc_custom_name(coordinator.device, key) and (
|
||||
component.lower() == "input" and component_id.isnumeric()
|
||||
):
|
||||
self._attr_translation_placeholders = {"input_number": component_id}
|
||||
self._attr_translation_key = "input_with_number"
|
||||
else:
|
||||
return
|
||||
|
||||
delattr(self, "_attr_name")
|
||||
|
||||
if not description.role and description.key != "input":
|
||||
translation_placeholders, translation_key = (
|
||||
get_entity_translation_attributes(
|
||||
get_rpc_channel_name(coordinator.device, key),
|
||||
description.translation_key,
|
||||
description.device_class,
|
||||
self._default_to_device_class_name(),
|
||||
)
|
||||
)
|
||||
|
||||
if translation_placeholders:
|
||||
self._attr_translation_placeholders = translation_placeholders
|
||||
if translation_key:
|
||||
self._attr_translation_key = translation_key
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if RPC sensor state is on."""
|
||||
@@ -151,84 +107,85 @@ class RpcBluTrvBinarySensor(RpcBinarySensor):
|
||||
SENSORS: dict[tuple[str, str], BlockBinarySensorDescription] = {
|
||||
("device", "overtemp"): BlockBinarySensorDescription(
|
||||
key="device|overtemp",
|
||||
translation_key="overheating",
|
||||
name="Overheating",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
("device", "overpower"): BlockBinarySensorDescription(
|
||||
key="device|overpower",
|
||||
translation_key="overpowering",
|
||||
name="Overpowering",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
("light", "overpower"): BlockBinarySensorDescription(
|
||||
key="light|overpower",
|
||||
translation_key="overpowering",
|
||||
name="Overpowering",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
("relay", "overpower"): BlockBinarySensorDescription(
|
||||
key="relay|overpower",
|
||||
translation_key="overpowering",
|
||||
name="Overpowering",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
("sensor", "dwIsOpened"): BlockBinarySensorDescription(
|
||||
key="sensor|dwIsOpened",
|
||||
translation_key="door",
|
||||
name="Door",
|
||||
device_class=BinarySensorDeviceClass.OPENING,
|
||||
available=lambda block: cast(int, block.dwIsOpened) != -1,
|
||||
),
|
||||
("sensor", "flood"): BlockBinarySensorDescription(
|
||||
key="sensor|flood",
|
||||
translation_key="flood",
|
||||
device_class=BinarySensorDeviceClass.MOISTURE,
|
||||
key="sensor|flood", name="Flood", device_class=BinarySensorDeviceClass.MOISTURE
|
||||
),
|
||||
("sensor", "gas"): BlockBinarySensorDescription(
|
||||
key="sensor|gas",
|
||||
name="Gas",
|
||||
device_class=BinarySensorDeviceClass.GAS,
|
||||
translation_key="gas",
|
||||
value=lambda value: value in ["mild", "heavy"],
|
||||
),
|
||||
("sensor", "smoke"): BlockBinarySensorDescription(
|
||||
key="sensor|smoke", device_class=BinarySensorDeviceClass.SMOKE
|
||||
key="sensor|smoke", name="Smoke", device_class=BinarySensorDeviceClass.SMOKE
|
||||
),
|
||||
("sensor", "vibration"): BlockBinarySensorDescription(
|
||||
key="sensor|vibration",
|
||||
name="Vibration",
|
||||
device_class=BinarySensorDeviceClass.VIBRATION,
|
||||
),
|
||||
("input", "input"): BlockBinarySensorDescription(
|
||||
key="input|input",
|
||||
translation_key="input",
|
||||
name="Input",
|
||||
device_class=BinarySensorDeviceClass.POWER,
|
||||
removal_condition=is_block_momentary_input,
|
||||
),
|
||||
("relay", "input"): BlockBinarySensorDescription(
|
||||
key="relay|input",
|
||||
translation_key="input",
|
||||
name="Input",
|
||||
device_class=BinarySensorDeviceClass.POWER,
|
||||
removal_condition=is_block_momentary_input,
|
||||
),
|
||||
("device", "input"): BlockBinarySensorDescription(
|
||||
key="device|input",
|
||||
translation_key="input",
|
||||
name="Input",
|
||||
device_class=BinarySensorDeviceClass.POWER,
|
||||
removal_condition=is_block_momentary_input,
|
||||
),
|
||||
("sensor", "extInput"): BlockBinarySensorDescription(
|
||||
key="sensor|extInput",
|
||||
translation_key="external_input",
|
||||
name="External input",
|
||||
device_class=BinarySensorDeviceClass.POWER,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
("sensor", "motion"): BlockBinarySensorDescription(
|
||||
key="sensor|motion", device_class=BinarySensorDeviceClass.MOTION
|
||||
key="sensor|motion", name="Motion", device_class=BinarySensorDeviceClass.MOTION
|
||||
),
|
||||
}
|
||||
|
||||
REST_SENSORS: Final = {
|
||||
"cloud": RestBinarySensorDescription(
|
||||
key="cloud",
|
||||
translation_key="cloud",
|
||||
name="Cloud",
|
||||
value=lambda status, _: status["cloud"]["connected"],
|
||||
device_class=BinarySensorDeviceClass.CONNECTIVITY,
|
||||
entity_registry_enabled_default=False,
|
||||
@@ -240,14 +197,13 @@ RPC_SENSORS: Final = {
|
||||
"input": RpcBinarySensorDescription(
|
||||
key="input",
|
||||
sub_key="state",
|
||||
translation_key="input",
|
||||
device_class=BinarySensorDeviceClass.POWER,
|
||||
removal_condition=is_rpc_momentary_input,
|
||||
),
|
||||
"cloud": RpcBinarySensorDescription(
|
||||
key="cloud",
|
||||
sub_key="connected",
|
||||
translation_key="cloud",
|
||||
name="Cloud",
|
||||
device_class=BinarySensorDeviceClass.CONNECTIVITY,
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
@@ -255,7 +211,7 @@ RPC_SENSORS: Final = {
|
||||
"external_power": RpcBinarySensorDescription(
|
||||
key="devicepower",
|
||||
sub_key="external",
|
||||
translation_key="external_power",
|
||||
name="External power",
|
||||
value=lambda status, _: status["present"],
|
||||
device_class=BinarySensorDeviceClass.POWER,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
@@ -263,7 +219,7 @@ RPC_SENSORS: Final = {
|
||||
"overtemp": RpcBinarySensorDescription(
|
||||
key="switch",
|
||||
sub_key="errors",
|
||||
translation_key="overheating",
|
||||
name="Overheating",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
value=lambda status, _: False if status is None else "overtemp" in status,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
@@ -272,7 +228,7 @@ RPC_SENSORS: Final = {
|
||||
"overpower": RpcBinarySensorDescription(
|
||||
key="switch",
|
||||
sub_key="errors",
|
||||
translation_key="overpowering",
|
||||
name="Overpowering",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
value=lambda status, _: False if status is None else "overpower" in status,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
@@ -281,7 +237,7 @@ RPC_SENSORS: Final = {
|
||||
"overvoltage": RpcBinarySensorDescription(
|
||||
key="switch",
|
||||
sub_key="errors",
|
||||
translation_key="overvoltage",
|
||||
name="Overvoltage",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
value=lambda status, _: False if status is None else "overvoltage" in status,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
@@ -290,7 +246,7 @@ RPC_SENSORS: Final = {
|
||||
"overcurrent": RpcBinarySensorDescription(
|
||||
key="switch",
|
||||
sub_key="errors",
|
||||
translation_key="overcurrent",
|
||||
name="Overcurrent",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
value=lambda status, _: False if status is None else "overcurrent" in status,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
@@ -299,12 +255,13 @@ RPC_SENSORS: Final = {
|
||||
"smoke": RpcBinarySensorDescription(
|
||||
key="smoke",
|
||||
sub_key="alarm",
|
||||
name="Smoke",
|
||||
device_class=BinarySensorDeviceClass.SMOKE,
|
||||
),
|
||||
"restart": RpcBinarySensorDescription(
|
||||
key="sys",
|
||||
sub_key="restart_required",
|
||||
translation_key="restart_required",
|
||||
name="Restart required",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
@@ -312,7 +269,7 @@ RPC_SENSORS: Final = {
|
||||
"boolean_generic": RpcBinarySensorDescription(
|
||||
key="boolean",
|
||||
sub_key="value",
|
||||
removal_condition=lambda config, _, key: not is_view_for_platform(
|
||||
removal_condition=lambda config, _status, key: not is_view_for_platform(
|
||||
config, key, BINARY_SENSOR_PLATFORM
|
||||
),
|
||||
role=ROLE_GENERIC,
|
||||
@@ -328,7 +285,7 @@ RPC_SENSORS: Final = {
|
||||
"calibration": RpcBinarySensorDescription(
|
||||
key="blutrv",
|
||||
sub_key="errors",
|
||||
translation_key="calibration",
|
||||
name="Calibration",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
value=lambda status, _: False if status is None else "not_calibrated" in status,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
@@ -337,13 +294,13 @@ RPC_SENSORS: Final = {
|
||||
"flood": RpcBinarySensorDescription(
|
||||
key="flood",
|
||||
sub_key="alarm",
|
||||
translation_key="flood",
|
||||
name="Flood",
|
||||
device_class=BinarySensorDeviceClass.MOISTURE,
|
||||
),
|
||||
"mute": RpcBinarySensorDescription(
|
||||
key="flood",
|
||||
sub_key="mute",
|
||||
translation_key="mute",
|
||||
name="Mute",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
"flood_cable_unplugged": RpcBinarySensorDescription(
|
||||
@@ -352,7 +309,7 @@ RPC_SENSORS: Final = {
|
||||
value=lambda status, _: False
|
||||
if status is None
|
||||
else "cable_unplugged" in status,
|
||||
translation_key="cable_unplugged",
|
||||
name="Cable unplugged",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
supported=lambda status: status.get("alarm") is not None,
|
||||
@@ -361,12 +318,14 @@ RPC_SENSORS: Final = {
|
||||
key="presence",
|
||||
sub_key="num_objects",
|
||||
value=lambda status, _: bool(status),
|
||||
name="Occupancy",
|
||||
device_class=BinarySensorDeviceClass.OCCUPANCY,
|
||||
entity_class=RpcPresenceBinarySensor,
|
||||
),
|
||||
"presencezone_state": RpcBinarySensorDescription(
|
||||
key="presencezone",
|
||||
sub_key="value",
|
||||
name="Occupancy",
|
||||
device_class=BinarySensorDeviceClass.OCCUPANCY,
|
||||
entity_class=RpcPresenceBinarySensor,
|
||||
),
|
||||
@@ -454,19 +413,6 @@ class BlockBinarySensor(ShellyBlockAttributeEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: BlockBinarySensorDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ShellyBlockCoordinator,
|
||||
block: Block,
|
||||
attribute: str,
|
||||
description: BlockBinarySensorDescription,
|
||||
) -> None:
|
||||
"""Initialize sensor."""
|
||||
super().__init__(coordinator, block, attribute, description)
|
||||
|
||||
if hasattr(self, "_attr_name"):
|
||||
delattr(self, "_attr_name")
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if sensor state is on."""
|
||||
@@ -478,18 +424,6 @@ class RestBinarySensor(ShellyRestAttributeEntity, BinarySensorEntity):
|
||||
|
||||
entity_description: RestBinarySensorDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ShellyBlockCoordinator,
|
||||
attribute: str,
|
||||
description: RestBinarySensorDescription,
|
||||
) -> None:
|
||||
"""Initialize sensor."""
|
||||
super().__init__(coordinator, attribute, description)
|
||||
|
||||
if hasattr(self, "_attr_name"):
|
||||
delattr(self, "_attr_name")
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if REST sensor state is on."""
|
||||
@@ -503,20 +437,6 @@ class BlockSleepingBinarySensor(
|
||||
|
||||
entity_description: BlockBinarySensorDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ShellyBlockCoordinator,
|
||||
block: Block | None,
|
||||
attribute: str,
|
||||
description: BlockBinarySensorDescription,
|
||||
entry: RegistryEntry | None = None,
|
||||
) -> None:
|
||||
"""Initialize the sleeping sensor."""
|
||||
super().__init__(coordinator, block, attribute, description, entry)
|
||||
|
||||
if hasattr(self, "_attr_name"):
|
||||
delattr(self, "_attr_name")
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
await super().async_added_to_hass()
|
||||
@@ -541,35 +461,6 @@ class RpcSleepingBinarySensor(
|
||||
|
||||
entity_description: RpcBinarySensorDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: ShellyRpcCoordinator,
|
||||
key: str,
|
||||
attribute: str,
|
||||
description: RpcBinarySensorDescription,
|
||||
entry: RegistryEntry | None = None,
|
||||
) -> None:
|
||||
"""Initialize the sleeping sensor."""
|
||||
super().__init__(coordinator, key, attribute, description, entry)
|
||||
|
||||
if coordinator.device.initialized:
|
||||
if hasattr(self, "_attr_name"):
|
||||
delattr(self, "_attr_name")
|
||||
|
||||
translation_placeholders, translation_key = (
|
||||
get_entity_translation_attributes(
|
||||
get_rpc_channel_name(coordinator.device, key),
|
||||
description.translation_key,
|
||||
description.device_class,
|
||||
self._default_to_device_class_name(),
|
||||
)
|
||||
)
|
||||
|
||||
if translation_placeholders:
|
||||
self._attr_translation_placeholders = translation_placeholders
|
||||
if translation_key:
|
||||
self._attr_translation_key = translation_key
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
@@ -129,80 +129,6 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"cable_unplugged": {
|
||||
"name": "Cable unplugged"
|
||||
},
|
||||
"cable_unplugged_with_channel_name": {
|
||||
"name": "{channel_name} cable unplugged"
|
||||
},
|
||||
"calibration": {
|
||||
"name": "Calibration"
|
||||
},
|
||||
"cloud": {
|
||||
"name": "Cloud"
|
||||
},
|
||||
"door": {
|
||||
"name": "Door"
|
||||
},
|
||||
"external_input": {
|
||||
"name": "External input"
|
||||
},
|
||||
"external_power": {
|
||||
"name": "External power"
|
||||
},
|
||||
"flood": {
|
||||
"name": "Flood"
|
||||
},
|
||||
"flood_with_channel_name": {
|
||||
"name": "{channel_name} flood"
|
||||
},
|
||||
"input": {
|
||||
"name": "Input"
|
||||
},
|
||||
"input_with_number": {
|
||||
"name": "Input {input_number}"
|
||||
},
|
||||
"mute": {
|
||||
"name": "Mute"
|
||||
},
|
||||
"mute_with_channel_name": {
|
||||
"name": "{channel_name} mute"
|
||||
},
|
||||
"occupancy_with_channel_name": {
|
||||
"name": "{channel_name} occupancy"
|
||||
},
|
||||
"overcurrent": {
|
||||
"name": "Overcurrent"
|
||||
},
|
||||
"overcurrent_with_channel_name": {
|
||||
"name": "{channel_name} overcurrent"
|
||||
},
|
||||
"overheating": {
|
||||
"name": "Overheating"
|
||||
},
|
||||
"overheating_with_channel_name": {
|
||||
"name": "{channel_name} overheating"
|
||||
},
|
||||
"overpowering": {
|
||||
"name": "Overpowering"
|
||||
},
|
||||
"overpowering_with_channel_name": {
|
||||
"name": "{channel_name} overpowering"
|
||||
},
|
||||
"overvoltage": {
|
||||
"name": "Overvoltage"
|
||||
},
|
||||
"overvoltage_with_channel_name": {
|
||||
"name": "{channel_name} overvoltage"
|
||||
},
|
||||
"restart_required": {
|
||||
"name": "Restart required"
|
||||
},
|
||||
"smoke_with_channel_name": {
|
||||
"name": "{channel_name} smoke"
|
||||
}
|
||||
},
|
||||
"button": {
|
||||
"calibrate": {
|
||||
"name": "Calibrate"
|
||||
|
||||
@@ -391,13 +391,7 @@ def get_shelly_model_name(
|
||||
return cast(str, MODEL_NAMES.get(model))
|
||||
|
||||
|
||||
def get_rpc_key(value: str) -> tuple[bool, str, str]:
|
||||
"""Get split device key."""
|
||||
parts = value.split(":")
|
||||
return len(parts) > 1, parts[0], parts[-1]
|
||||
|
||||
|
||||
def get_rpc_custom_name(device: RpcDevice, key: str) -> str | None:
|
||||
def get_rpc_component_name(device: RpcDevice, key: str) -> str | None:
|
||||
"""Get component name from device config."""
|
||||
if (
|
||||
key in device.config
|
||||
@@ -409,11 +403,6 @@ def get_rpc_custom_name(device: RpcDevice, key: str) -> str | None:
|
||||
return None
|
||||
|
||||
|
||||
def get_rpc_component_name(device: RpcDevice, key: str) -> str | None:
|
||||
"""Get component name from device config."""
|
||||
return get_rpc_custom_name(device, key)
|
||||
|
||||
|
||||
def get_rpc_channel_name(device: RpcDevice, key: str) -> str | None:
|
||||
"""Get name based on device and channel name."""
|
||||
if BLU_TRV_IDENTIFIER in key:
|
||||
@@ -425,11 +414,11 @@ def get_rpc_channel_name(device: RpcDevice, key: str) -> str | None:
|
||||
component = key.split(":")[0]
|
||||
component_id = key.split(":")[-1]
|
||||
|
||||
if custom_name := get_rpc_custom_name(device, key):
|
||||
if component_name := get_rpc_component_name(device, key):
|
||||
if component in (*VIRTUAL_COMPONENTS, "input", "presencezone", "script"):
|
||||
return custom_name
|
||||
return component_name
|
||||
|
||||
return custom_name if instances == 1 else None
|
||||
return component_name if instances == 1 else None
|
||||
|
||||
if component in (*VIRTUAL_COMPONENTS, "input"):
|
||||
return f"{component.title()} {component_id}"
|
||||
|
||||
@@ -9,11 +9,7 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["soco", "sonos_websocket"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": [
|
||||
"defusedxml==0.7.1",
|
||||
"soco==0.30.12",
|
||||
"sonos-websocket==0.1.3"
|
||||
],
|
||||
"requirements": ["soco==0.30.12", "sonos-websocket==0.1.3"],
|
||||
"ssdp": [
|
||||
{
|
||||
"st": "urn:schemas-upnp-org:device:ZonePlayer:1"
|
||||
|
||||
@@ -1,83 +1,33 @@
|
||||
"""Support for Tibber."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
import aiohttp
|
||||
from aiohttp.client_exceptions import ClientError, ClientResponseError
|
||||
import tibber
|
||||
from tibber import data_api as tibber_data_api
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, EVENT_HOMEASSISTANT_STOP, Platform
|
||||
from homeassistant.core import Event, HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
ImplementationUnavailableError,
|
||||
OAuth2Session,
|
||||
async_get_config_entry_implementation,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import dt as dt_util, ssl as ssl_util
|
||||
|
||||
from .const import (
|
||||
API_TYPE_DATA_API,
|
||||
API_TYPE_GRAPHQL,
|
||||
CONF_API_TYPE,
|
||||
DATA_HASS_CONFIG,
|
||||
DOMAIN,
|
||||
)
|
||||
from .const import DATA_HASS_CONFIG, DOMAIN
|
||||
from .services import async_setup_services
|
||||
|
||||
GRAPHQL_PLATFORMS = [Platform.NOTIFY, Platform.SENSOR]
|
||||
DATA_API_PLATFORMS = [Platform.SENSOR]
|
||||
PLATFORMS = [Platform.NOTIFY, Platform.SENSOR]
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class TibberGraphQLRuntimeData:
|
||||
"""Runtime data for GraphQL-based Tibber entries."""
|
||||
|
||||
tibber: tibber.Tibber
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class TibberDataAPIRuntimeData:
|
||||
"""Runtime data for Tibber Data API entries."""
|
||||
|
||||
session: OAuth2Session
|
||||
_client: tibber_data_api.TibberDataAPI | None = None
|
||||
|
||||
async def async_get_client(
|
||||
self, hass: HomeAssistant
|
||||
) -> tibber_data_api.TibberDataAPI:
|
||||
"""Return an authenticated Tibber Data API client."""
|
||||
await self.session.async_ensure_token_valid()
|
||||
token = self.session.token
|
||||
access_token = token.get(CONF_ACCESS_TOKEN)
|
||||
if not access_token:
|
||||
raise ConfigEntryAuthFailed("Access token missing from OAuth session")
|
||||
if self._client is None:
|
||||
self._client = tibber_data_api.TibberDataAPI(
|
||||
access_token,
|
||||
websession=async_get_clientsession(hass),
|
||||
)
|
||||
self._client.set_access_token(access_token)
|
||||
return self._client
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Tibber component."""
|
||||
|
||||
hass.data[DATA_HASS_CONFIG] = config
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
|
||||
async_setup_services(hass)
|
||||
|
||||
@@ -87,100 +37,45 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a config entry."""
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
api_type = entry.data.get(CONF_API_TYPE, API_TYPE_GRAPHQL)
|
||||
|
||||
if api_type == API_TYPE_DATA_API:
|
||||
return await _async_setup_data_api_entry(hass, entry)
|
||||
|
||||
return await _async_setup_graphql_entry(hass, entry)
|
||||
|
||||
|
||||
async def _async_setup_graphql_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up the legacy GraphQL Tibber entry."""
|
||||
|
||||
tibber_connection = tibber.Tibber(
|
||||
access_token=entry.data[CONF_ACCESS_TOKEN],
|
||||
websession=async_get_clientsession(hass),
|
||||
time_zone=dt_util.get_default_time_zone(),
|
||||
ssl=ssl_util.get_default_context(),
|
||||
)
|
||||
hass.data[DOMAIN] = tibber_connection
|
||||
|
||||
runtime = TibberGraphQLRuntimeData(tibber_connection)
|
||||
entry.runtime_data = runtime
|
||||
hass.data[DOMAIN][API_TYPE_GRAPHQL] = runtime
|
||||
|
||||
async def _close(_event: Event) -> None:
|
||||
async def _close(event: Event) -> None:
|
||||
await tibber_connection.rt_disconnect()
|
||||
|
||||
entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close))
|
||||
|
||||
try:
|
||||
await tibber_connection.update_info()
|
||||
|
||||
except (
|
||||
TimeoutError,
|
||||
aiohttp.ClientError,
|
||||
tibber.RetryableHttpExceptionError,
|
||||
) as err:
|
||||
raise ConfigEntryNotReady("Unable to connect") from err
|
||||
except tibber.InvalidLoginError as err:
|
||||
_LOGGER.error("Failed to login to Tibber GraphQL API: %s", err)
|
||||
except tibber.InvalidLoginError as exp:
|
||||
_LOGGER.error("Failed to login. %s", exp)
|
||||
return False
|
||||
except tibber.FatalHttpExceptionError as err:
|
||||
_LOGGER.error("Fatal error communicating with Tibber GraphQL API: %s", err)
|
||||
except tibber.FatalHttpExceptionError:
|
||||
return False
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, GRAPHQL_PLATFORMS)
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def _async_setup_data_api_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a Tibber Data API entry."""
|
||||
|
||||
try:
|
||||
implementation = await async_get_config_entry_implementation(hass, entry)
|
||||
except ImplementationUnavailableError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="oauth2_implementation_unavailable",
|
||||
) from err
|
||||
|
||||
session = OAuth2Session(hass, entry, implementation)
|
||||
|
||||
try:
|
||||
await session.async_ensure_token_valid()
|
||||
except ClientResponseError as err:
|
||||
if 400 <= err.status < 500:
|
||||
raise ConfigEntryAuthFailed(
|
||||
"OAuth session is not valid, reauthentication required"
|
||||
) from err
|
||||
raise ConfigEntryNotReady from err
|
||||
except ClientError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
|
||||
runtime = TibberDataAPIRuntimeData(session=session)
|
||||
entry.runtime_data = runtime
|
||||
hass.data[DOMAIN][API_TYPE_DATA_API] = runtime
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, DATA_API_PLATFORMS)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
api_type = config_entry.data.get(CONF_API_TYPE, API_TYPE_GRAPHQL)
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(
|
||||
config_entry,
|
||||
GRAPHQL_PLATFORMS if api_type == API_TYPE_GRAPHQL else DATA_API_PLATFORMS,
|
||||
config_entry, PLATFORMS
|
||||
)
|
||||
|
||||
if unload_ok:
|
||||
if api_type == API_TYPE_GRAPHQL:
|
||||
runtime = hass.data[DOMAIN].get(api_type)
|
||||
if runtime:
|
||||
tibber_connection = runtime.tibber
|
||||
await tibber_connection.rt_disconnect()
|
||||
|
||||
hass.data[DOMAIN].pop(api_type, None)
|
||||
tibber_connection = hass.data[DOMAIN]
|
||||
await tibber_connection.rt_disconnect()
|
||||
return unload_ok
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
"""Application credentials platform for Tibber."""
|
||||
|
||||
from homeassistant.components.application_credentials import AuthorizationServer
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
AUTHORIZE_URL = "https://thewall.tibber.com/connect/authorize"
|
||||
TOKEN_URL = "https://thewall.tibber.com/connect/token"
|
||||
|
||||
|
||||
async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
|
||||
"""Return authorization server for Tibber Data API."""
|
||||
return AuthorizationServer(
|
||||
authorize_url=AUTHORIZE_URL,
|
||||
token_url=TOKEN_URL,
|
||||
)
|
||||
@@ -2,118 +2,36 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import aiohttp
|
||||
import tibber
|
||||
from tibber.data_api import TibberDataAPI
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
AbstractOAuth2FlowHandler,
|
||||
async_get_config_entry_implementation,
|
||||
async_get_implementations,
|
||||
)
|
||||
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
|
||||
|
||||
from .const import (
|
||||
API_TYPE_DATA_API,
|
||||
API_TYPE_GRAPHQL,
|
||||
CONF_API_TYPE,
|
||||
DATA_API_DEFAULT_SCOPES,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
TYPE_SELECTOR = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_API_TYPE, default=API_TYPE_GRAPHQL): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[API_TYPE_GRAPHQL, API_TYPE_DATA_API],
|
||||
translation_key="api_type",
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
GRAPHQL_SCHEMA = vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str})
|
||||
from .const import DOMAIN
|
||||
|
||||
DATA_SCHEMA = vol.Schema({vol.Required(CONF_ACCESS_TOKEN): str})
|
||||
ERR_TIMEOUT = "timeout"
|
||||
ERR_CLIENT = "cannot_connect"
|
||||
ERR_TOKEN = "invalid_access_token"
|
||||
TOKEN_URL = "https://developer.tibber.com/settings/access-token"
|
||||
DATA_API_DOC_URL = "https://data-api.tibber.com/docs/auth/"
|
||||
APPLICATION_CREDENTIALS_DOC_URL = (
|
||||
"https://www.home-assistant.io/integrations/application_credentials/"
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TibberConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
|
||||
class TibberConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Tibber integration."""
|
||||
|
||||
DOMAIN = DOMAIN
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
super().__init__()
|
||||
self._api_type: str | None = None
|
||||
self._data_api_home_ids: list[str] = []
|
||||
self._data_api_user_sub: str | None = None
|
||||
self._reauth_confirmed: bool = False
|
||||
|
||||
@property
|
||||
def logger(self) -> logging.Logger:
|
||||
"""Return the logger."""
|
||||
return _LOGGER
|
||||
|
||||
@property
|
||||
def extra_authorize_data(self) -> dict:
|
||||
"""Extra data appended to the authorize URL."""
|
||||
if self._api_type != API_TYPE_DATA_API:
|
||||
return super().extra_authorize_data
|
||||
return {
|
||||
**super().extra_authorize_data,
|
||||
"scope": " ".join(DATA_API_DEFAULT_SCOPES),
|
||||
}
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=TYPE_SELECTOR,
|
||||
description_placeholders={"url": DATA_API_DOC_URL},
|
||||
)
|
||||
|
||||
self._api_type = user_input[CONF_API_TYPE]
|
||||
|
||||
if self._api_type == API_TYPE_GRAPHQL:
|
||||
return await self.async_step_graphql()
|
||||
|
||||
return await self.async_step_data_api()
|
||||
|
||||
async def async_step_graphql(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle GraphQL token based configuration."""
|
||||
|
||||
if self.source != SOURCE_REAUTH:
|
||||
for entry in self._async_current_entries(include_ignore=False):
|
||||
if entry.entry_id == self.context.get("entry_id"):
|
||||
continue
|
||||
if entry.data.get(CONF_API_TYPE, API_TYPE_GRAPHQL) == API_TYPE_GRAPHQL:
|
||||
return self.async_abort(reason="already_configured")
|
||||
self._async_abort_entries_match()
|
||||
|
||||
if user_input is not None:
|
||||
access_token = user_input[CONF_ACCESS_TOKEN].replace(" ", "")
|
||||
@@ -140,146 +58,24 @@ class TibberConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
|
||||
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="graphql",
|
||||
data_schema=GRAPHQL_SCHEMA,
|
||||
step_id="user",
|
||||
data_schema=DATA_SCHEMA,
|
||||
description_placeholders={"url": TOKEN_URL},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
unique_id = tibber_connection.user_id
|
||||
await self.async_set_unique_id(unique_id)
|
||||
|
||||
if self.source == SOURCE_REAUTH:
|
||||
self._abort_if_unique_id_mismatch(reason="wrong_account")
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(),
|
||||
data_updates={
|
||||
CONF_API_TYPE: API_TYPE_GRAPHQL,
|
||||
CONF_ACCESS_TOKEN: access_token,
|
||||
},
|
||||
title=tibber_connection.name,
|
||||
)
|
||||
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
data = {
|
||||
CONF_API_TYPE: API_TYPE_GRAPHQL,
|
||||
CONF_ACCESS_TOKEN: access_token,
|
||||
}
|
||||
|
||||
return self.async_create_entry(
|
||||
title=tibber_connection.name,
|
||||
data=data,
|
||||
data={CONF_ACCESS_TOKEN: access_token},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="graphql",
|
||||
data_schema=GRAPHQL_SCHEMA,
|
||||
step_id="user",
|
||||
data_schema=DATA_SCHEMA,
|
||||
description_placeholders={"url": TOKEN_URL},
|
||||
errors={},
|
||||
)
|
||||
|
||||
async def async_step_data_api(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the Data API OAuth configuration."""
|
||||
|
||||
implementations = await async_get_implementations(self.hass, self.DOMAIN)
|
||||
if not implementations:
|
||||
return self.async_abort(
|
||||
reason="missing_credentials",
|
||||
description_placeholders={
|
||||
"application_credentials_url": APPLICATION_CREDENTIALS_DOC_URL,
|
||||
"data_api_url": DATA_API_DOC_URL,
|
||||
},
|
||||
)
|
||||
|
||||
if self.source != SOURCE_REAUTH:
|
||||
for entry in self._async_current_entries(include_ignore=False):
|
||||
if entry.entry_id == self.context.get("entry_id"):
|
||||
continue
|
||||
if entry.data.get(CONF_API_TYPE, API_TYPE_GRAPHQL) == API_TYPE_DATA_API:
|
||||
return self.async_abort(reason="already_configured")
|
||||
|
||||
return await self.async_step_pick_implementation(user_input)
|
||||
|
||||
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
|
||||
"""Finalize the OAuth flow and create the config entry."""
|
||||
|
||||
assert self._api_type == API_TYPE_DATA_API
|
||||
|
||||
token: dict[str, Any] = data["token"]
|
||||
|
||||
client = TibberDataAPI(
|
||||
token[CONF_ACCESS_TOKEN],
|
||||
websession=async_get_clientsession(self.hass),
|
||||
)
|
||||
|
||||
try:
|
||||
userinfo = await client.get_userinfo()
|
||||
except (
|
||||
tibber.InvalidLoginError,
|
||||
tibber.FatalHttpExceptionError,
|
||||
) as err:
|
||||
self.logger.error("Authentication failed against Data API: %s", err)
|
||||
return self.async_abort(reason="oauth_invalid_token")
|
||||
except (aiohttp.ClientError, TimeoutError) as err:
|
||||
self.logger.error("Error retrieving homes via Data API: %s", err)
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
unique_id = userinfo["email"]
|
||||
title = userinfo["email"]
|
||||
await self.async_set_unique_id(unique_id)
|
||||
if self.source == SOURCE_REAUTH:
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
self._abort_if_unique_id_mismatch(
|
||||
reason="wrong_account",
|
||||
description_placeholders={"email": reauth_entry.unique_id or ""},
|
||||
)
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry,
|
||||
data_updates={
|
||||
CONF_API_TYPE: API_TYPE_DATA_API,
|
||||
"auth_implementation": data["auth_implementation"],
|
||||
CONF_TOKEN: token,
|
||||
},
|
||||
title=title,
|
||||
)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
entry_data: dict[str, Any] = {
|
||||
CONF_API_TYPE: API_TYPE_DATA_API,
|
||||
"auth_implementation": data["auth_implementation"],
|
||||
CONF_TOKEN: token,
|
||||
}
|
||||
return self.async_create_entry(
|
||||
title=title,
|
||||
data=entry_data,
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauthentication."""
|
||||
|
||||
api_type = entry_data.get(CONF_API_TYPE, API_TYPE_GRAPHQL)
|
||||
self._api_type = api_type
|
||||
|
||||
if api_type == API_TYPE_DATA_API:
|
||||
self.flow_impl = await async_get_config_entry_implementation(
|
||||
self.hass, self._get_reauth_entry()
|
||||
)
|
||||
return await self.async_step_auth()
|
||||
|
||||
self.context["title_placeholders"] = {"name": self._get_reauth_entry().title}
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm the reauth dialog for GraphQL entries."""
|
||||
if user_input is None and not self._reauth_confirmed:
|
||||
self._reauth_confirmed = True
|
||||
return self.async_show_form(step_id="reauth_confirm")
|
||||
|
||||
return await self.async_step_graphql()
|
||||
|
||||
@@ -3,19 +3,3 @@
|
||||
DATA_HASS_CONFIG = "tibber_hass_config"
|
||||
DOMAIN = "tibber"
|
||||
MANUFACTURER = "Tibber"
|
||||
CONF_API_TYPE = "api_type"
|
||||
API_TYPE_GRAPHQL = "graphql"
|
||||
API_TYPE_DATA_API = "data_api"
|
||||
DATA_API_DEFAULT_SCOPES = [
|
||||
"openid",
|
||||
"profile",
|
||||
"email",
|
||||
"offline_access",
|
||||
"data-api-user-read",
|
||||
"data-api-chargers-read",
|
||||
"data-api-energy-systems-read",
|
||||
"data-api-homes-read",
|
||||
"data-api-thermostats-read",
|
||||
"data-api-vehicles-read",
|
||||
"data-api-inverters-read",
|
||||
]
|
||||
|
||||
@@ -4,11 +4,9 @@ from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
from typing import cast
|
||||
|
||||
from aiohttp.client_exceptions import ClientError
|
||||
import tibber
|
||||
from tibber.data_api import TibberDataAPI, TibberDevice
|
||||
|
||||
from homeassistant.components.recorder import get_instance
|
||||
from homeassistant.components.recorder.models import (
|
||||
@@ -24,7 +22,6 @@ from homeassistant.components.recorder.statistics import (
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import UnitOfEnergy
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.unit_conversion import EnergyConverter
|
||||
@@ -190,48 +187,3 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]):
|
||||
unit_of_measurement=unit,
|
||||
)
|
||||
async_add_external_statistics(self.hass, metadata, statistics)
|
||||
|
||||
|
||||
class TibberDataAPICoordinator(DataUpdateCoordinator[dict[str, TibberDevice]]):
|
||||
"""Fetch and cache Tibber Data API device capabilities."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
runtime_data: Any,
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name=f"{DOMAIN} Data API",
|
||||
update_interval=timedelta(minutes=1),
|
||||
config_entry=entry,
|
||||
)
|
||||
self._runtime_data = runtime_data
|
||||
|
||||
async def _async_get_client(self) -> TibberDataAPI:
|
||||
"""Get the Tibber Data API client with error handling."""
|
||||
try:
|
||||
return cast(
|
||||
TibberDataAPI,
|
||||
await self._runtime_data.async_get_client(self.hass),
|
||||
)
|
||||
except ConfigEntryAuthFailed:
|
||||
raise
|
||||
except (ClientError, TimeoutError, tibber.UserAgentMissingError) as err:
|
||||
raise UpdateFailed(
|
||||
f"Unable to create Tibber Data API client: {err}"
|
||||
) from err
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
"""Initial load of Tibber Data API devices."""
|
||||
client = await self._async_get_client()
|
||||
self.data = await client.get_all_devices()
|
||||
|
||||
async def _async_update_data(self) -> dict[str, TibberDevice]:
|
||||
"""Fetch the latest device capabilities from the Tibber Data API."""
|
||||
client = await self._async_get_client()
|
||||
devices: dict[str, TibberDevice] = await client.update_devices()
|
||||
return devices
|
||||
|
||||
@@ -4,80 +4,29 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import aiohttp
|
||||
import tibber
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
|
||||
from .const import API_TYPE_DATA_API, API_TYPE_GRAPHQL, CONF_API_TYPE, DOMAIN
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, config_entry: ConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
|
||||
api_type = config_entry.data.get(CONF_API_TYPE, API_TYPE_GRAPHQL)
|
||||
domain_data = hass.data.get(DOMAIN, {})
|
||||
|
||||
if api_type == API_TYPE_GRAPHQL:
|
||||
tibber_connection: tibber.Tibber = domain_data[API_TYPE_GRAPHQL].tibber
|
||||
return {
|
||||
"api_type": API_TYPE_GRAPHQL,
|
||||
"homes": [
|
||||
{
|
||||
"last_data_timestamp": home.last_data_timestamp,
|
||||
"has_active_subscription": home.has_active_subscription,
|
||||
"has_real_time_consumption": home.has_real_time_consumption,
|
||||
"last_cons_data_timestamp": home.last_cons_data_timestamp,
|
||||
"country": home.country,
|
||||
}
|
||||
for home in tibber_connection.get_homes(only_active=False)
|
||||
],
|
||||
}
|
||||
|
||||
runtime = domain_data.get(API_TYPE_DATA_API)
|
||||
if runtime is None:
|
||||
return {
|
||||
"api_type": API_TYPE_DATA_API,
|
||||
"devices": [],
|
||||
}
|
||||
|
||||
devices: dict[str, Any] = {}
|
||||
error: str | None = None
|
||||
try:
|
||||
devices = await (await runtime.async_get_client(hass)).get_all_devices()
|
||||
except ConfigEntryAuthFailed:
|
||||
devices = {}
|
||||
error = "Authentication failed"
|
||||
except TimeoutError:
|
||||
devices = {}
|
||||
error = "Timeout error"
|
||||
except aiohttp.ClientError:
|
||||
devices = {}
|
||||
error = "Client error"
|
||||
except tibber.InvalidLoginError:
|
||||
devices = {}
|
||||
error = "Invalid login"
|
||||
except tibber.RetryableHttpExceptionError as err:
|
||||
devices = {}
|
||||
error = f"Retryable HTTP error ({err.status})"
|
||||
except tibber.FatalHttpExceptionError as err:
|
||||
devices = {}
|
||||
error = f"Fatal HTTP error ({err.status})"
|
||||
tibber_connection: tibber.Tibber = hass.data[DOMAIN]
|
||||
|
||||
return {
|
||||
"api_type": API_TYPE_DATA_API,
|
||||
"error": error,
|
||||
"devices": [
|
||||
"homes": [
|
||||
{
|
||||
"id": device.id,
|
||||
"name": device.name,
|
||||
"brand": device.brand,
|
||||
"model": device.model,
|
||||
"last_data_timestamp": home.last_data_timestamp,
|
||||
"has_active_subscription": home.has_active_subscription,
|
||||
"has_real_time_consumption": home.has_real_time_consumption,
|
||||
"last_cons_data_timestamp": home.last_cons_data_timestamp,
|
||||
"country": home.country,
|
||||
}
|
||||
for device in devices.values()
|
||||
],
|
||||
for home in tibber_connection.get_homes(only_active=False)
|
||||
]
|
||||
}
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
"name": "Tibber",
|
||||
"codeowners": ["@danielhiversen"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["application_credentials", "recorder"],
|
||||
"dependencies": ["recorder"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/tibber",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["tibber"],
|
||||
"requirements": ["pyTibber==0.33.1"]
|
||||
"requirements": ["pyTibber==0.32.2"]
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import API_TYPE_GRAPHQL, DOMAIN
|
||||
from . import DOMAIN
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -39,7 +39,7 @@ class TibberNotificationEntity(NotifyEntity):
|
||||
|
||||
async def async_send_message(self, message: str, title: str | None = None) -> None:
|
||||
"""Send a message to Tibber devices."""
|
||||
tibber_connection: Tibber = self.hass.data[DOMAIN][API_TYPE_GRAPHQL].tibber
|
||||
tibber_connection: Tibber = self.hass.data[DOMAIN]
|
||||
try:
|
||||
await tibber_connection.send_notification(
|
||||
title or ATTR_TITLE_DEFAULT, message
|
||||
|
||||
@@ -10,8 +10,7 @@ from random import randrange
|
||||
from typing import Any
|
||||
|
||||
import aiohttp
|
||||
from tibber import FatalHttpExceptionError, RetryableHttpExceptionError, TibberHome
|
||||
from tibber.data_api import TibberDevice
|
||||
import tibber
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
@@ -28,7 +27,6 @@ from homeassistant.const import (
|
||||
UnitOfElectricCurrent,
|
||||
UnitOfElectricPotential,
|
||||
UnitOfEnergy,
|
||||
UnitOfLength,
|
||||
UnitOfPower,
|
||||
)
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
@@ -43,14 +41,8 @@ from homeassistant.helpers.update_coordinator import (
|
||||
)
|
||||
from homeassistant.util import Throttle, dt as dt_util
|
||||
|
||||
from .const import (
|
||||
API_TYPE_DATA_API,
|
||||
API_TYPE_GRAPHQL,
|
||||
CONF_API_TYPE,
|
||||
DOMAIN,
|
||||
MANUFACTURER,
|
||||
)
|
||||
from .coordinator import TibberDataAPICoordinator, TibberDataCoordinator
|
||||
from .const import DOMAIN, MANUFACTURER
|
||||
from .coordinator import TibberDataCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -268,58 +260,6 @@ SENSORS: tuple[SensorEntityDescription, ...] = (
|
||||
)
|
||||
|
||||
|
||||
DATA_API_SENSORS: tuple[SensorEntityDescription, ...] = (
|
||||
SensorEntityDescription(
|
||||
key="storage.stateOfCharge",
|
||||
translation_key="storage_state_of_charge",
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="storage.targetStateOfCharge",
|
||||
translation_key="storage_target_state_of_charge",
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="connector.status",
|
||||
translation_key="connector_status",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=["connected", "disconnected", "unknown"],
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="charging.status",
|
||||
translation_key="charging_status",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=["charging", "idle", "unknown"],
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="range.remaining",
|
||||
translation_key="range_remaining",
|
||||
device_class=SensorDeviceClass.DISTANCE,
|
||||
native_unit_of_measurement=UnitOfLength.KILOMETERS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="charging.current.max",
|
||||
translation_key="charging_current_max",
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="charging.current.offlineFallback",
|
||||
translation_key="charging_current_offline_fallback",
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
@@ -327,11 +267,7 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up the Tibber sensor."""
|
||||
|
||||
if entry.data.get(CONF_API_TYPE, API_TYPE_GRAPHQL) == API_TYPE_DATA_API:
|
||||
await _async_setup_data_api_sensors(hass, entry, async_add_entities)
|
||||
return
|
||||
|
||||
tibber_connection = hass.data[DOMAIN][API_TYPE_GRAPHQL].tibber
|
||||
tibber_connection = hass.data[DOMAIN]
|
||||
|
||||
entity_registry = er.async_get(hass)
|
||||
device_registry = dr.async_get(hass)
|
||||
@@ -344,11 +280,7 @@ async def async_setup_entry(
|
||||
except TimeoutError as err:
|
||||
_LOGGER.error("Timeout connecting to Tibber home: %s ", err)
|
||||
raise PlatformNotReady from err
|
||||
except (
|
||||
RetryableHttpExceptionError,
|
||||
FatalHttpExceptionError,
|
||||
aiohttp.ClientError,
|
||||
) as err:
|
||||
except (tibber.RetryableHttpExceptionError, aiohttp.ClientError) as err:
|
||||
_LOGGER.error("Error connecting to Tibber home: %s ", err)
|
||||
raise PlatformNotReady from err
|
||||
|
||||
@@ -396,94 +328,14 @@ async def async_setup_entry(
|
||||
async_add_entities(entities, True)
|
||||
|
||||
|
||||
async def _async_setup_data_api_sensors(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up sensors backed by the Tibber Data API."""
|
||||
|
||||
domain_data = hass.data.get(DOMAIN, {})
|
||||
runtime = domain_data[API_TYPE_DATA_API]
|
||||
|
||||
coordinator = TibberDataAPICoordinator(hass, entry, runtime)
|
||||
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entities: list[TibberDataAPISensor] = []
|
||||
api_sensors = {sensor.key: sensor for sensor in DATA_API_SENSORS}
|
||||
|
||||
for device in coordinator.data.values():
|
||||
for sensor in device.sensors:
|
||||
description: SensorEntityDescription | None = api_sensors.get(sensor.id)
|
||||
if description is None:
|
||||
_LOGGER.error("Sensor %s not found", sensor)
|
||||
continue
|
||||
entities.append(
|
||||
TibberDataAPISensor(
|
||||
coordinator, device, description, sensor.description
|
||||
)
|
||||
)
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class TibberDataAPISensor(CoordinatorEntity[TibberDataAPICoordinator], SensorEntity):
|
||||
"""Representation of a Tibber Data API capability sensor."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: TibberDataAPICoordinator,
|
||||
device: TibberDevice,
|
||||
entity_description: SensorEntityDescription,
|
||||
name: str,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
self._device_id: str = device.id
|
||||
self.entity_description = entity_description
|
||||
self._attr_name = name
|
||||
|
||||
self._attr_unique_id = f"{device.external_id}_{self.entity_description.key}"
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, device.external_id)},
|
||||
name=device.name,
|
||||
manufacturer=device.brand,
|
||||
model=device.model,
|
||||
)
|
||||
|
||||
@property
|
||||
def native_value(
|
||||
self,
|
||||
) -> StateType:
|
||||
"""Return the value reported by the device."""
|
||||
device = self.coordinator.data.get(self._device_id)
|
||||
if device is None:
|
||||
return None
|
||||
|
||||
for sensor in device.sensors:
|
||||
if sensor.id == self.entity_description.key:
|
||||
return sensor.value
|
||||
return None
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return whether the sensor is available."""
|
||||
device = self.coordinator.data.get(self._device_id)
|
||||
if device is None:
|
||||
return False
|
||||
return self.native_value is not None
|
||||
|
||||
|
||||
class TibberSensor(SensorEntity):
|
||||
"""Representation of a generic Tibber sensor."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, *args: Any, tibber_home: TibberHome, **kwargs: Any) -> None:
|
||||
def __init__(
|
||||
self, *args: Any, tibber_home: tibber.TibberHome, **kwargs: Any
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(*args, **kwargs)
|
||||
self._tibber_home = tibber_home
|
||||
@@ -514,7 +366,7 @@ class TibberSensorElPrice(TibberSensor):
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
_attr_translation_key = "electricity_price"
|
||||
|
||||
def __init__(self, tibber_home: TibberHome) -> None:
|
||||
def __init__(self, tibber_home: tibber.TibberHome) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(tibber_home=tibber_home)
|
||||
self._last_updated: datetime.datetime | None = None
|
||||
@@ -591,7 +443,7 @@ class TibberDataSensor(TibberSensor, CoordinatorEntity[TibberDataCoordinator]):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
tibber_home: TibberHome,
|
||||
tibber_home: tibber.TibberHome,
|
||||
coordinator: TibberDataCoordinator,
|
||||
entity_description: SensorEntityDescription,
|
||||
) -> None:
|
||||
@@ -618,7 +470,7 @@ class TibberSensorRT(TibberSensor, CoordinatorEntity["TibberRtDataCoordinator"])
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
tibber_home: TibberHome,
|
||||
tibber_home: tibber.TibberHome,
|
||||
description: SensorEntityDescription,
|
||||
initial_state: float,
|
||||
coordinator: TibberRtDataCoordinator,
|
||||
@@ -680,7 +532,7 @@ class TibberRtEntityCreator:
|
||||
def __init__(
|
||||
self,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
tibber_home: TibberHome,
|
||||
tibber_home: tibber.TibberHome,
|
||||
entity_registry: er.EntityRegistry,
|
||||
) -> None:
|
||||
"""Initialize the data handler."""
|
||||
@@ -766,7 +618,7 @@ class TibberRtDataCoordinator(DataUpdateCoordinator): # pylint: disable=hass-en
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
add_sensor_callback: Callable[[TibberRtDataCoordinator, Any], None],
|
||||
tibber_home: TibberHome,
|
||||
tibber_home: tibber.TibberHome,
|
||||
) -> None:
|
||||
"""Initialize the data handler."""
|
||||
self._add_sensor_callback = add_sensor_callback
|
||||
|
||||
@@ -18,7 +18,7 @@ from homeassistant.core import (
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import API_TYPE_GRAPHQL, DOMAIN
|
||||
from .const import DOMAIN
|
||||
|
||||
PRICE_SERVICE_NAME = "get_prices"
|
||||
ATTR_START: Final = "start"
|
||||
@@ -33,15 +33,7 @@ SERVICE_SCHEMA: Final = vol.Schema(
|
||||
|
||||
|
||||
async def __get_prices(call: ServiceCall) -> ServiceResponse:
|
||||
domain_data = call.hass.data.get(DOMAIN, {})
|
||||
runtime = domain_data.get(API_TYPE_GRAPHQL)
|
||||
if runtime is None:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="graphql_required",
|
||||
)
|
||||
|
||||
tibber_connection = runtime.tibber
|
||||
tibber_connection = call.hass.data[DOMAIN]
|
||||
|
||||
start = __get_date(call.data.get(ATTR_START), "start")
|
||||
end = __get_date(call.data.get(ATTR_END), "end")
|
||||
|
||||
@@ -1,13 +1,7 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
|
||||
"missing_credentials": "Add Tibber Data API application credentials under application credentials before continuing. See {application_credentials_url} for guidance and {data_api_url} for API documentation.",
|
||||
"oauth_invalid_token": "[%key:common::config_flow::abort::oauth2_error%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"wrong_account": "The connected account does not match {email}. Sign in with the same Tibber account and try again."
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -15,21 +9,11 @@
|
||||
"timeout": "[%key:common::config_flow::error::timeout_connect%]"
|
||||
},
|
||||
"step": {
|
||||
"graphql": {
|
||||
"user": {
|
||||
"data": {
|
||||
"access_token": "[%key:common::config_flow::data::access_token%]"
|
||||
},
|
||||
"description": "Enter your access token from {url}"
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"description": "Reconnect your Tibber account to refresh access.",
|
||||
"title": "[%key:common::config_flow::title::reauth%]"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"api_type": "API type"
|
||||
},
|
||||
"description": "Select which Tibber API you want to configure. See {url} for documentation."
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -56,37 +40,6 @@
|
||||
"average_power": {
|
||||
"name": "Average power"
|
||||
},
|
||||
"battery_battery_power": {
|
||||
"name": "Battery power"
|
||||
},
|
||||
"battery_battery_state_of_charge": {
|
||||
"name": "Battery state of charge"
|
||||
},
|
||||
"battery_stored_energy": {
|
||||
"name": "Stored energy"
|
||||
},
|
||||
"charging_current_max": {
|
||||
"name": "Maximum charging current"
|
||||
},
|
||||
"charging_current_offline_fallback": {
|
||||
"name": "Offline fallback charging current"
|
||||
},
|
||||
"charging_status": {
|
||||
"name": "Charging status",
|
||||
"state": {
|
||||
"charging": "Charging",
|
||||
"idle": "Idle",
|
||||
"unknown": "Unknown"
|
||||
}
|
||||
},
|
||||
"connector_status": {
|
||||
"name": "Connector status",
|
||||
"state": {
|
||||
"connected": "Connected",
|
||||
"disconnected": "Disconnected",
|
||||
"unknown": "Unknown"
|
||||
}
|
||||
},
|
||||
"current_l1": {
|
||||
"name": "Current L1"
|
||||
},
|
||||
@@ -102,30 +55,6 @@
|
||||
"estimated_hour_consumption": {
|
||||
"name": "Estimated consumption current hour"
|
||||
},
|
||||
"ev_charger_charge_current": {
|
||||
"name": "Charge current"
|
||||
},
|
||||
"ev_charger_charging_state": {
|
||||
"name": "Charging state"
|
||||
},
|
||||
"ev_charger_power": {
|
||||
"name": "Charging power"
|
||||
},
|
||||
"ev_charger_session_energy": {
|
||||
"name": "Session energy"
|
||||
},
|
||||
"ev_charger_total_energy": {
|
||||
"name": "Total energy"
|
||||
},
|
||||
"heat_pump_measured_temperature": {
|
||||
"name": "Measured temperature"
|
||||
},
|
||||
"heat_pump_operation_mode": {
|
||||
"name": "Operation mode"
|
||||
},
|
||||
"heat_pump_target_temperature": {
|
||||
"name": "Target temperature"
|
||||
},
|
||||
"last_meter_consumption": {
|
||||
"name": "Last meter consumption"
|
||||
},
|
||||
@@ -159,33 +88,9 @@
|
||||
"power_production": {
|
||||
"name": "Power production"
|
||||
},
|
||||
"range_remaining": {
|
||||
"name": "Remaining range"
|
||||
},
|
||||
"signal_strength": {
|
||||
"name": "Signal strength"
|
||||
},
|
||||
"solar_power": {
|
||||
"name": "Solar power"
|
||||
},
|
||||
"solar_power_production": {
|
||||
"name": "Power production"
|
||||
},
|
||||
"storage_state_of_charge": {
|
||||
"name": "Storage state of charge"
|
||||
},
|
||||
"storage_target_state_of_charge": {
|
||||
"name": "Storage target state of charge"
|
||||
},
|
||||
"thermostat_measured_temperature": {
|
||||
"name": "Measured temperature"
|
||||
},
|
||||
"thermostat_operation_mode": {
|
||||
"name": "Operation mode"
|
||||
},
|
||||
"thermostat_target_temperature": {
|
||||
"name": "Target temperature"
|
||||
},
|
||||
"voltage_phase1": {
|
||||
"name": "Voltage phase1"
|
||||
},
|
||||
@@ -198,27 +103,13 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"graphql_required": {
|
||||
"message": "Configure the Tibber GraphQL API before calling this service."
|
||||
},
|
||||
"invalid_date": {
|
||||
"message": "Invalid datetime provided {date}"
|
||||
},
|
||||
"oauth2_implementation_unavailable": {
|
||||
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
|
||||
},
|
||||
"send_message_timeout": {
|
||||
"message": "Timeout sending message with Tibber"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"api_type": {
|
||||
"options": {
|
||||
"data_api": "Data API (OAuth2)",
|
||||
"graphql": "GraphQL API (access token)"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"get_prices": {
|
||||
"description": "Fetches hourly energy prices including price level.",
|
||||
|
||||
@@ -32,82 +32,12 @@ from .entity import TuyaEntity
|
||||
from .models import (
|
||||
DPCodeBooleanWrapper,
|
||||
DPCodeEnumWrapper,
|
||||
DPCodeIntegerWrapper,
|
||||
IntegerTypeData,
|
||||
find_dpcode,
|
||||
)
|
||||
from .util import get_dpcode, get_dptype, remap_value
|
||||
|
||||
|
||||
class _BrightnessWrapper(DPCodeIntegerWrapper):
|
||||
"""Wrapper for brightness DP code.
|
||||
|
||||
Handles brightness value conversion between device scale and Home Assistant's
|
||||
0-255 scale. Supports optional dynamic brightness_min and brightness_max
|
||||
wrappers that allow the device to specify runtime brightness range limits.
|
||||
"""
|
||||
|
||||
brightness_min: DPCodeIntegerWrapper | None = None
|
||||
brightness_max: DPCodeIntegerWrapper | None = None
|
||||
|
||||
def read_device_status(self, device: CustomerDevice) -> Any | None:
|
||||
"""Return the brightness of this light between 0..255."""
|
||||
if (brightness := self._read_device_status_raw(device)) is None:
|
||||
return None
|
||||
|
||||
# Remap value to our scale
|
||||
brightness = self.type_information.remap_value_to(brightness)
|
||||
|
||||
# If there is a min/max value, the brightness is actually limited.
|
||||
# Meaning it is actually not on a 0-255 scale.
|
||||
if (
|
||||
self.brightness_max is not None
|
||||
and self.brightness_min is not None
|
||||
and (brightness_max := device.status.get(self.brightness_max.dpcode))
|
||||
is not None
|
||||
and (brightness_min := device.status.get(self.brightness_min.dpcode))
|
||||
is not None
|
||||
):
|
||||
# Remap values onto our scale
|
||||
brightness_max = self.brightness_max.type_information.remap_value_to(
|
||||
brightness_max
|
||||
)
|
||||
brightness_min = self.brightness_min.type_information.remap_value_to(
|
||||
brightness_min
|
||||
)
|
||||
|
||||
# Remap the brightness value from their min-max to our 0-255 scale
|
||||
brightness = remap_value(
|
||||
brightness, from_min=brightness_min, from_max=brightness_max
|
||||
)
|
||||
|
||||
return round(brightness)
|
||||
|
||||
def _convert_value_to_raw_value(self, device: CustomerDevice, value: Any) -> Any:
|
||||
"""Convert a Home Assistant value (0..255) back to a raw device value."""
|
||||
# If there is a min/max value, the brightness is actually limited.
|
||||
# Meaning it is actually not on a 0-255 scale.
|
||||
if (
|
||||
self.brightness_max is not None
|
||||
and self.brightness_min is not None
|
||||
and (brightness_max := device.status.get(self.brightness_max.dpcode))
|
||||
is not None
|
||||
and (brightness_min := device.status.get(self.brightness_min.dpcode))
|
||||
is not None
|
||||
):
|
||||
# Remap values onto our scale
|
||||
brightness_max = self.brightness_max.type_information.remap_value_to(
|
||||
brightness_max
|
||||
)
|
||||
brightness_min = self.brightness_min.type_information.remap_value_to(
|
||||
brightness_min
|
||||
)
|
||||
|
||||
# Remap the brightness value from our 0-255 scale to their min-max
|
||||
value = remap_value(value, to_min=brightness_min, to_max=brightness_max)
|
||||
return round(self.type_information.remap_value_from(value))
|
||||
|
||||
|
||||
@dataclass
|
||||
class ColorTypeData:
|
||||
"""Color Type Data."""
|
||||
@@ -487,24 +417,6 @@ class ColorData:
|
||||
return round(self.type_data.v_type.remap_value_to(self.v_value, 0, 255))
|
||||
|
||||
|
||||
def _get_brightness_wrapper(
|
||||
device: CustomerDevice, description: TuyaLightEntityDescription
|
||||
) -> _BrightnessWrapper | None:
|
||||
if (
|
||||
brightness_wrapper := _BrightnessWrapper.find_dpcode(
|
||||
device, description.brightness, prefer_function=True
|
||||
)
|
||||
) is None:
|
||||
return None
|
||||
brightness_wrapper.brightness_max = DPCodeIntegerWrapper.find_dpcode(
|
||||
device, description.brightness_max, prefer_function=True
|
||||
)
|
||||
brightness_wrapper.brightness_min = DPCodeIntegerWrapper.find_dpcode(
|
||||
device, description.brightness_min, prefer_function=True
|
||||
)
|
||||
return brightness_wrapper
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: TuyaConfigEntry,
|
||||
@@ -525,7 +437,6 @@ async def async_setup_entry(
|
||||
device,
|
||||
manager,
|
||||
description,
|
||||
brightness_wrapper=_get_brightness_wrapper(device, description),
|
||||
color_mode_wrapper=DPCodeEnumWrapper.find_dpcode(
|
||||
device, description.color_mode, prefer_function=True
|
||||
),
|
||||
@@ -553,6 +464,9 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
|
||||
|
||||
entity_description: TuyaLightEntityDescription
|
||||
|
||||
_brightness_max: IntegerTypeData | None = None
|
||||
_brightness_min: IntegerTypeData | None = None
|
||||
_brightness: IntegerTypeData | None = None
|
||||
_color_data_dpcode: DPCode | None = None
|
||||
_color_data_type: ColorTypeData | None = None
|
||||
_color_temp: IntegerTypeData | None = None
|
||||
@@ -567,7 +481,6 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
|
||||
device_manager: Manager,
|
||||
description: TuyaLightEntityDescription,
|
||||
*,
|
||||
brightness_wrapper: DPCodeIntegerWrapper | None,
|
||||
color_mode_wrapper: DPCodeEnumWrapper | None,
|
||||
switch_wrapper: DPCodeBooleanWrapper,
|
||||
) -> None:
|
||||
@@ -575,14 +488,25 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
|
||||
super().__init__(device, device_manager)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{super().unique_id}{description.key}"
|
||||
self._brightness_wrapper = brightness_wrapper
|
||||
self._color_mode_wrapper = color_mode_wrapper
|
||||
self._switch_wrapper = switch_wrapper
|
||||
|
||||
color_modes: set[ColorMode] = {ColorMode.ONOFF}
|
||||
|
||||
if brightness_wrapper:
|
||||
if int_type := find_dpcode(
|
||||
self.device,
|
||||
description.brightness,
|
||||
dptype=DPType.INTEGER,
|
||||
prefer_function=True,
|
||||
):
|
||||
self._brightness = int_type
|
||||
color_modes.add(ColorMode.BRIGHTNESS)
|
||||
self._brightness_max = find_dpcode(
|
||||
self.device, description.brightness_max, dptype=DPType.INTEGER
|
||||
)
|
||||
self._brightness_min = find_dpcode(
|
||||
self.device, description.brightness_min, dptype=DPType.INTEGER
|
||||
)
|
||||
|
||||
if (dpcode := get_dpcode(self.device, description.color_data)) and (
|
||||
get_dptype(self.device, dpcode, prefer_function=True) == DPType.JSON
|
||||
@@ -605,8 +529,7 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
|
||||
# If no type is found, use a default one
|
||||
self._color_data_type = self.entity_description.default_color_type
|
||||
if self._color_data_dpcode == DPCode.COLOUR_DATA_V2 or (
|
||||
self._brightness_wrapper
|
||||
and self._brightness_wrapper.type_information.max > 255
|
||||
self._brightness and self._brightness.max > 255
|
||||
):
|
||||
self._color_data_type = DEFAULT_COLOR_TYPE_DATA_V2
|
||||
|
||||
@@ -718,16 +641,46 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
|
||||
},
|
||||
]
|
||||
|
||||
elif self._brightness_wrapper and (
|
||||
ATTR_BRIGHTNESS in kwargs or ATTR_WHITE in kwargs
|
||||
):
|
||||
elif self._brightness and (ATTR_BRIGHTNESS in kwargs or ATTR_WHITE in kwargs):
|
||||
if ATTR_BRIGHTNESS in kwargs:
|
||||
brightness = kwargs[ATTR_BRIGHTNESS]
|
||||
else:
|
||||
brightness = kwargs[ATTR_WHITE]
|
||||
|
||||
# If there is a min/max value, the brightness is actually limited.
|
||||
# Meaning it is actually not on a 0-255 scale.
|
||||
if (
|
||||
self._brightness_max is not None
|
||||
and self._brightness_min is not None
|
||||
and (
|
||||
brightness_max := self.device.status.get(
|
||||
self._brightness_max.dpcode
|
||||
)
|
||||
)
|
||||
is not None
|
||||
and (
|
||||
brightness_min := self.device.status.get(
|
||||
self._brightness_min.dpcode
|
||||
)
|
||||
)
|
||||
is not None
|
||||
):
|
||||
# Remap values onto our scale
|
||||
brightness_max = self._brightness_max.remap_value_to(brightness_max)
|
||||
brightness_min = self._brightness_min.remap_value_to(brightness_min)
|
||||
|
||||
# Remap the brightness value from their min-max to our 0-255 scale
|
||||
brightness = remap_value(
|
||||
brightness,
|
||||
to_min=brightness_min,
|
||||
to_max=brightness_max,
|
||||
)
|
||||
|
||||
commands += [
|
||||
self._brightness_wrapper.get_update_command(self.device, brightness),
|
||||
{
|
||||
"code": self._brightness.dpcode,
|
||||
"value": round(self._brightness.remap_value_from(brightness)),
|
||||
},
|
||||
]
|
||||
|
||||
self._send_command(commands)
|
||||
@@ -738,12 +691,43 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
|
||||
|
||||
@property
|
||||
def brightness(self) -> int | None:
|
||||
"""Return the brightness of this light between 0..255."""
|
||||
"""Return the brightness of the light."""
|
||||
# If the light is currently in color mode, extract the brightness from the color data
|
||||
if self.color_mode == ColorMode.HS and (color_data := self._get_color_data()):
|
||||
return color_data.brightness
|
||||
|
||||
return self._read_wrapper(self._brightness_wrapper)
|
||||
if not self._brightness:
|
||||
return None
|
||||
|
||||
brightness = self.device.status.get(self._brightness.dpcode)
|
||||
if brightness is None:
|
||||
return None
|
||||
|
||||
# Remap value to our scale
|
||||
brightness = self._brightness.remap_value_to(brightness)
|
||||
|
||||
# If there is a min/max value, the brightness is actually limited.
|
||||
# Meaning it is actually not on a 0-255 scale.
|
||||
if (
|
||||
self._brightness_max is not None
|
||||
and self._brightness_min is not None
|
||||
and (brightness_max := self.device.status.get(self._brightness_max.dpcode))
|
||||
is not None
|
||||
and (brightness_min := self.device.status.get(self._brightness_min.dpcode))
|
||||
is not None
|
||||
):
|
||||
# Remap values onto our scale
|
||||
brightness_max = self._brightness_max.remap_value_to(brightness_max)
|
||||
brightness_min = self._brightness_min.remap_value_to(brightness_min)
|
||||
|
||||
# Remap the brightness value from their min-max to our 0-255 scale
|
||||
brightness = remap_value(
|
||||
brightness,
|
||||
from_min=brightness_min,
|
||||
from_max=brightness_max,
|
||||
)
|
||||
|
||||
return round(brightness)
|
||||
|
||||
@property
|
||||
def color_temp_kelvin(self) -> int | None:
|
||||
|
||||
@@ -130,7 +130,7 @@ async def build_item_response(
|
||||
)
|
||||
|
||||
|
||||
def item_payload(item: InstalledPackage, images: dict[str, list[Image]]) -> BrowseMedia:
|
||||
def item_payload(item: InstalledPackage, images: dict[str, list[Image]]):
|
||||
"""Create response payload for a single media item."""
|
||||
thumbnail = None
|
||||
image = _find_media_image(images.get(item.one_store_product_id, [])) # type: ignore[arg-type]
|
||||
|
||||
@@ -335,7 +335,7 @@ class XboxStorageDeviceSensorEntity(
|
||||
)
|
||||
|
||||
@property
|
||||
def data(self) -> StorageDevice | None:
|
||||
def data(self):
|
||||
"""Storage device data."""
|
||||
consoles = self.coordinator.data.result
|
||||
console = next((c for c in consoles if c.id == self._console.id), None)
|
||||
|
||||
@@ -37,7 +37,6 @@ APPLICATION_CREDENTIALS = [
|
||||
"smartthings",
|
||||
"spotify",
|
||||
"tesla_fleet",
|
||||
"tibber",
|
||||
"twitch",
|
||||
"volvo",
|
||||
"weheat",
|
||||
|
||||
@@ -3418,7 +3418,7 @@
|
||||
"name": "LCN",
|
||||
"integration_type": "hub",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
"iot_class": "local_push"
|
||||
},
|
||||
"ld2410_ble": {
|
||||
"name": "LD2410 BLE",
|
||||
|
||||
@@ -30,7 +30,7 @@ certifi>=2021.5.30
|
||||
ciso8601==2.3.3
|
||||
cronsim==2.7
|
||||
cryptography==46.0.2
|
||||
dbus-fast==3.0.0
|
||||
dbus-fast==2.45.0
|
||||
file-read-backwards==2.0.0
|
||||
fnv-hash-fast==1.6.0
|
||||
go2rtc-client==0.2.1
|
||||
|
||||
10
mypy.ini
generated
10
mypy.ini
generated
@@ -5549,16 +5549,6 @@ disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.xbox.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
disallow_subclassing_any = true
|
||||
disallow_untyped_calls = true
|
||||
disallow_untyped_decorators = true
|
||||
disallow_untyped_defs = true
|
||||
warn_return_any = true
|
||||
warn_unreachable = true
|
||||
|
||||
[mypy-homeassistant.components.xiaomi_ble.*]
|
||||
check_untyped_defs = true
|
||||
disallow_incomplete_defs = true
|
||||
|
||||
13
requirements_all.txt
generated
13
requirements_all.txt
generated
@@ -500,7 +500,7 @@ anova-wifi==0.17.0
|
||||
anthemav==1.4.1
|
||||
|
||||
# homeassistant.components.anthropic
|
||||
anthropic==0.73.0
|
||||
anthropic==0.69.0
|
||||
|
||||
# homeassistant.components.mcp_server
|
||||
anyio==4.10.0
|
||||
@@ -772,7 +772,7 @@ datadog==0.52.0
|
||||
datapoint==0.12.1
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
dbus-fast==3.0.0
|
||||
dbus-fast==2.45.0
|
||||
|
||||
# homeassistant.components.debugpy
|
||||
debugpy==1.8.16
|
||||
@@ -786,7 +786,6 @@ deebot-client==16.3.0
|
||||
# homeassistant.components.ihc
|
||||
# homeassistant.components.namecheapdns
|
||||
# homeassistant.components.ohmconnect
|
||||
# homeassistant.components.sonos
|
||||
defusedxml==0.7.1
|
||||
|
||||
# homeassistant.components.deluge
|
||||
@@ -1077,7 +1076,7 @@ google-genai==1.38.0
|
||||
google-maps-routing==0.6.15
|
||||
|
||||
# homeassistant.components.nest
|
||||
google-nest-sdm==9.0.1
|
||||
google-nest-sdm==7.1.4
|
||||
|
||||
# homeassistant.components.google_photos
|
||||
google-photos-library-api==0.12.1
|
||||
@@ -1839,7 +1838,7 @@ pyRFXtrx==0.31.1
|
||||
pySDCP==1
|
||||
|
||||
# homeassistant.components.tibber
|
||||
pyTibber==0.33.1
|
||||
pyTibber==0.32.2
|
||||
|
||||
# homeassistant.components.dlink
|
||||
pyW215==0.8.0
|
||||
@@ -2063,7 +2062,7 @@ pyhomeworks==1.1.2
|
||||
pyialarm==2.2.0
|
||||
|
||||
# homeassistant.components.icloud
|
||||
pyicloud==2.2.0
|
||||
pyicloud==2.1.0
|
||||
|
||||
# homeassistant.components.insteon
|
||||
pyinsteon==1.6.3
|
||||
@@ -2269,7 +2268,7 @@ pypaperless==4.1.1
|
||||
pypca==0.0.7
|
||||
|
||||
# homeassistant.components.lcn
|
||||
pypck==0.9.2
|
||||
pypck==0.8.12
|
||||
|
||||
# homeassistant.components.pglab
|
||||
pypglab==0.0.5
|
||||
|
||||
13
requirements_test_all.txt
generated
13
requirements_test_all.txt
generated
@@ -473,7 +473,7 @@ anova-wifi==0.17.0
|
||||
anthemav==1.4.1
|
||||
|
||||
# homeassistant.components.anthropic
|
||||
anthropic==0.73.0
|
||||
anthropic==0.69.0
|
||||
|
||||
# homeassistant.components.mcp_server
|
||||
anyio==4.10.0
|
||||
@@ -675,7 +675,7 @@ datadog==0.52.0
|
||||
datapoint==0.12.1
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
dbus-fast==3.0.0
|
||||
dbus-fast==2.45.0
|
||||
|
||||
# homeassistant.components.debugpy
|
||||
debugpy==1.8.16
|
||||
@@ -686,7 +686,6 @@ deebot-client==16.3.0
|
||||
# homeassistant.components.ihc
|
||||
# homeassistant.components.namecheapdns
|
||||
# homeassistant.components.ohmconnect
|
||||
# homeassistant.components.sonos
|
||||
defusedxml==0.7.1
|
||||
|
||||
# homeassistant.components.deluge
|
||||
@@ -944,7 +943,7 @@ google-genai==1.38.0
|
||||
google-maps-routing==0.6.15
|
||||
|
||||
# homeassistant.components.nest
|
||||
google-nest-sdm==9.0.1
|
||||
google-nest-sdm==7.1.4
|
||||
|
||||
# homeassistant.components.google_photos
|
||||
google-photos-library-api==0.12.1
|
||||
@@ -1549,7 +1548,7 @@ pyHomee==1.3.8
|
||||
pyRFXtrx==0.31.1
|
||||
|
||||
# homeassistant.components.tibber
|
||||
pyTibber==0.33.1
|
||||
pyTibber==0.32.2
|
||||
|
||||
# homeassistant.components.dlink
|
||||
pyW215==0.8.0
|
||||
@@ -1722,7 +1721,7 @@ pyhomeworks==1.1.2
|
||||
pyialarm==2.2.0
|
||||
|
||||
# homeassistant.components.icloud
|
||||
pyicloud==2.2.0
|
||||
pyicloud==2.1.0
|
||||
|
||||
# homeassistant.components.insteon
|
||||
pyinsteon==1.6.3
|
||||
@@ -1892,7 +1891,7 @@ pypalazzetti==0.1.20
|
||||
pypaperless==4.1.1
|
||||
|
||||
# homeassistant.components.lcn
|
||||
pypck==0.9.2
|
||||
pypck==0.8.12
|
||||
|
||||
# homeassistant.components.pglab
|
||||
pypglab==0.0.5
|
||||
|
||||
@@ -428,6 +428,7 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
|
||||
"gogogate2",
|
||||
"goodwe",
|
||||
"google_assistant",
|
||||
"google_assistant_sdk",
|
||||
"google_cloud",
|
||||
"google_domains",
|
||||
"google_generative_ai_conversation",
|
||||
@@ -1442,6 +1443,7 @@ INTEGRATIONS_WITHOUT_SCALE = [
|
||||
"goodwe",
|
||||
"google",
|
||||
"google_assistant",
|
||||
"google_assistant_sdk",
|
||||
"google_cloud",
|
||||
"google_domains",
|
||||
"google_generative_ai_conversation",
|
||||
|
||||
@@ -1,25 +1 @@
|
||||
"""Tests for the AdGuard Home integration."""
|
||||
|
||||
from homeassistant.const import CONTENT_TYPE_JSON
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
|
||||
|
||||
async def setup_integration(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
) -> None:
|
||||
"""Fixture for setting up the component."""
|
||||
config_entry.add_to_hass(hass)
|
||||
|
||||
aioclient_mock.get(
|
||||
"https://127.0.0.1:3000/control/status",
|
||||
json={"version": "v0.107.50"},
|
||||
headers={"Content-Type": CONTENT_TYPE_JSON},
|
||||
)
|
||||
|
||||
await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -1,32 +0,0 @@
|
||||
"""Common fixtures for the adguard tests."""
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.adguard import DOMAIN
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_SSL,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_config_entry() -> MockConfigEntry:
|
||||
"""Mock a config entry."""
|
||||
return MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
data={
|
||||
CONF_HOST: "127.0.0.1",
|
||||
CONF_PORT: 3000,
|
||||
CONF_USERNAME: "user",
|
||||
CONF_PASSWORD: "pass",
|
||||
CONF_SSL: True,
|
||||
CONF_VERIFY_SSL: True,
|
||||
},
|
||||
title="AdGuard Home",
|
||||
)
|
||||
@@ -1,61 +0,0 @@
|
||||
# serializer version: 1
|
||||
# name: test_update[update.adguard_home-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'update',
|
||||
'entity_category': <EntityCategory.CONFIG: 'config'>,
|
||||
'entity_id': 'update.adguard_home',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': None,
|
||||
'platform': 'adguard',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': <UpdateEntityFeature: 1>,
|
||||
'translation_key': None,
|
||||
'unique_id': 'adguard_127.0.0.1_3000_update',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_update[update.adguard_home-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'auto_update': False,
|
||||
'display_precision': 0,
|
||||
'entity_picture': 'https://brands.home-assistant.io/_/adguard/icon.png',
|
||||
'friendly_name': 'AdGuard Home',
|
||||
'in_progress': False,
|
||||
'installed_version': 'v0.107.50',
|
||||
'latest_version': 'v0.107.59',
|
||||
'release_summary': 'AdGuard Home v0.107.59 is now available!',
|
||||
'release_url': 'https://github.com/AdguardTeam/AdGuardHome/releases/tag/v0.107.59',
|
||||
'skipped_version': None,
|
||||
'supported_features': <UpdateEntityFeature: 1>,
|
||||
'title': None,
|
||||
'update_percentage': None,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'update.adguard_home',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
@@ -1,138 +0,0 @@
|
||||
"""Tests for the AdGuard Home update entity."""
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from adguardhome import AdGuardHomeError
|
||||
import pytest
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.const import CONTENT_TYPE_JSON, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from . import setup_integration
|
||||
|
||||
from tests.common import MockConfigEntry, snapshot_platform
|
||||
from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
|
||||
|
||||
async def test_update(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
snapshot: SnapshotAssertion,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test the adguard update platform."""
|
||||
aioclient_mock.post(
|
||||
"https://127.0.0.1:3000/control/version.json",
|
||||
json={
|
||||
"new_version": "v0.107.59",
|
||||
"announcement": "AdGuard Home v0.107.59 is now available!",
|
||||
"announcement_url": "https://github.com/AdguardTeam/AdGuardHome/releases/tag/v0.107.59",
|
||||
"can_autoupdate": True,
|
||||
"disabled": False,
|
||||
},
|
||||
headers={"Content-Type": CONTENT_TYPE_JSON},
|
||||
)
|
||||
|
||||
with patch("homeassistant.components.adguard.PLATFORMS", [Platform.UPDATE]):
|
||||
await setup_integration(hass, mock_config_entry, aioclient_mock)
|
||||
|
||||
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
|
||||
|
||||
|
||||
async def test_update_disabled(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test the adguard update is disabled."""
|
||||
aioclient_mock.post(
|
||||
"https://127.0.0.1:3000/control/version.json",
|
||||
json={"disabled": True},
|
||||
headers={"Content-Type": CONTENT_TYPE_JSON},
|
||||
)
|
||||
|
||||
with patch("homeassistant.components.adguard.PLATFORMS", [Platform.UPDATE]):
|
||||
await setup_integration(hass, mock_config_entry, aioclient_mock)
|
||||
|
||||
assert not hass.states.async_all()
|
||||
|
||||
|
||||
async def test_update_install(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test the adguard update installation."""
|
||||
aioclient_mock.post(
|
||||
"https://127.0.0.1:3000/control/version.json",
|
||||
json={
|
||||
"new_version": "v0.107.59",
|
||||
"announcement": "AdGuard Home v0.107.59 is now available!",
|
||||
"announcement_url": "https://github.com/AdguardTeam/AdGuardHome/releases/tag/v0.107.59",
|
||||
"can_autoupdate": True,
|
||||
"disabled": False,
|
||||
},
|
||||
headers={"Content-Type": CONTENT_TYPE_JSON},
|
||||
)
|
||||
aioclient_mock.post("https://127.0.0.1:3000/control/update")
|
||||
|
||||
with patch("homeassistant.components.adguard.PLATFORMS", [Platform.UPDATE]):
|
||||
await setup_integration(hass, mock_config_entry, aioclient_mock)
|
||||
|
||||
aioclient_mock.mock_calls.clear()
|
||||
|
||||
await hass.services.async_call(
|
||||
"update",
|
||||
"install",
|
||||
{"entity_id": "update.adguard_home"},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
assert aioclient_mock.mock_calls[0][0] == "POST"
|
||||
assert (
|
||||
str(aioclient_mock.mock_calls[0][1]) == "https://127.0.0.1:3000/control/update"
|
||||
)
|
||||
|
||||
|
||||
async def test_update_install_failed(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test the adguard update install failed."""
|
||||
aioclient_mock.post(
|
||||
"https://127.0.0.1:3000/control/version.json",
|
||||
json={
|
||||
"new_version": "v0.107.59",
|
||||
"announcement": "AdGuard Home v0.107.59 is now available!",
|
||||
"announcement_url": "https://github.com/AdguardTeam/AdGuardHome/releases/tag/v0.107.59",
|
||||
"can_autoupdate": True,
|
||||
"disabled": False,
|
||||
},
|
||||
headers={"Content-Type": CONTENT_TYPE_JSON},
|
||||
)
|
||||
aioclient_mock.post(
|
||||
"https://127.0.0.1:3000/control/update", exc=AdGuardHomeError("boom")
|
||||
)
|
||||
|
||||
with patch("homeassistant.components.adguard.PLATFORMS", [Platform.UPDATE]):
|
||||
await setup_integration(hass, mock_config_entry, aioclient_mock)
|
||||
|
||||
aioclient_mock.mock_calls.clear()
|
||||
|
||||
with pytest.raises(HomeAssistantError):
|
||||
await hass.services.async_call(
|
||||
"update",
|
||||
"install",
|
||||
{"entity_id": "update.adguard_home"},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
assert aioclient_mock.mock_calls[0][0] == "POST"
|
||||
assert (
|
||||
str(aioclient_mock.mock_calls[0][1]) == "https://127.0.0.1:3000/control/update"
|
||||
)
|
||||
@@ -1,16 +1,74 @@
|
||||
"""Tests for GIOS."""
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from homeassistant.components.gios.const import DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.common import (
|
||||
MockConfigEntry,
|
||||
async_load_json_array_fixture,
|
||||
async_load_json_object_fixture,
|
||||
)
|
||||
|
||||
STATIONS = [
|
||||
{
|
||||
"Identyfikator stacji": 123,
|
||||
"Nazwa stacji": "Test Name 1",
|
||||
"WGS84 φ N": "99.99",
|
||||
"WGS84 λ E": "88.88",
|
||||
},
|
||||
{
|
||||
"Identyfikator stacji": 321,
|
||||
"Nazwa stacji": "Test Name 2",
|
||||
"WGS84 φ N": "77.77",
|
||||
"WGS84 λ E": "66.66",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
async def setup_integration(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Set up the GIOS integration for testing."""
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
async def init_integration(
|
||||
hass: HomeAssistant, incomplete_data=False, invalid_indexes=False
|
||||
) -> MockConfigEntry:
|
||||
"""Set up the GIOS integration in Home Assistant."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
title="Home",
|
||||
unique_id="123",
|
||||
data={"station_id": 123, "name": "Home"},
|
||||
entry_id="86129426118ae32020417a53712d6eef",
|
||||
)
|
||||
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
indexes = await async_load_json_object_fixture(hass, "indexes.json", DOMAIN)
|
||||
station = await async_load_json_array_fixture(hass, "station.json", DOMAIN)
|
||||
sensors = await async_load_json_object_fixture(hass, "sensors.json", DOMAIN)
|
||||
if incomplete_data:
|
||||
indexes["AqIndex"] = "foo"
|
||||
sensors["pm10"]["Lista danych pomiarowych"][0]["Wartość"] = None
|
||||
sensors["pm10"]["Lista danych pomiarowych"][1]["Wartość"] = None
|
||||
if invalid_indexes:
|
||||
indexes = {}
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.gios.coordinator.Gios._get_stations",
|
||||
return_value=STATIONS,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.gios.coordinator.Gios._get_station",
|
||||
return_value=station,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.gios.coordinator.Gios._get_all_sensors",
|
||||
return_value=sensors,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.gios.coordinator.Gios._get_indexes",
|
||||
return_value=indexes,
|
||||
),
|
||||
):
|
||||
entry.add_to_hass(hass)
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
return entry
|
||||
|
||||
@@ -1,84 +0,0 @@
|
||||
"""Fixtures for GIOS integration tests."""
|
||||
|
||||
from collections.abc import AsyncGenerator
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
from gios.model import GiosSensors, GiosStation, Sensor as GiosSensor
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.gios.const import DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import setup_integration
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_config_entry() -> MockConfigEntry:
|
||||
"""Return the default mocked config entry."""
|
||||
return MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
title="Home",
|
||||
unique_id="123",
|
||||
data={"station_id": 123, "name": "Home"},
|
||||
entry_id="86129426118ae32020417a53712d6eef",
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_gios_sensors() -> GiosSensors:
|
||||
"""Return the default mocked gios sensors."""
|
||||
return GiosSensors(
|
||||
aqi=GiosSensor(name="AQI", id=None, index=None, value="good"),
|
||||
c6h6=GiosSensor(name="benzene", id=658, index="very_good", value=0.23789),
|
||||
co=GiosSensor(name="carbon monoxide", id=660, index="good", value=251.874),
|
||||
no=GiosSensor(name="nitrogen monoxide", id=664, index=None, value=5.1),
|
||||
no2=GiosSensor(name="nitrogen dioxide", id=665, index="good", value=7.13411),
|
||||
nox=GiosSensor(name="nitrogen oxides", id=666, index=None, value=5.5),
|
||||
o3=GiosSensor(name="ozone", id=667, index="good", value=95.7768),
|
||||
pm10=GiosSensor(
|
||||
name="particulate matter 10", id=14395, index="good", value=16.8344
|
||||
),
|
||||
pm25=GiosSensor(name="particulate matter 2.5", id=670, index="good", value=4),
|
||||
so2=GiosSensor(name="sulfur dioxide", id=672, index="very_good", value=4.35478),
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_gios_stations() -> dict[int, GiosStation]:
|
||||
"""Return the default mocked gios stations."""
|
||||
return {
|
||||
123: GiosStation(id=123, name="Test Name 1", latitude=99.99, longitude=88.88),
|
||||
321: GiosStation(id=321, name="Test Name 2", latitude=77.77, longitude=66.66),
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def mock_gios(
|
||||
hass: HomeAssistant,
|
||||
mock_gios_stations: dict[int, GiosStation],
|
||||
mock_gios_sensors: GiosSensors,
|
||||
) -> AsyncGenerator[MagicMock]:
|
||||
"""Yield a mocked GIOS client."""
|
||||
with (
|
||||
patch("homeassistant.components.gios.Gios", autospec=True) as mock_gios,
|
||||
patch("homeassistant.components.gios.config_flow.Gios", new=mock_gios),
|
||||
):
|
||||
mock_gios.create = AsyncMock(return_value=mock_gios)
|
||||
mock_gios.async_update = AsyncMock(return_value=mock_gios_sensors)
|
||||
mock_gios.measurement_stations = mock_gios_stations
|
||||
mock_gios.station_id = 123
|
||||
mock_gios.station_name = mock_gios_stations[mock_gios.station_id].name
|
||||
|
||||
yield mock_gios
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def init_integration(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_gios: MagicMock,
|
||||
) -> None:
|
||||
"""Set up the GIOS integration for testing."""
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
38
tests/components/gios/fixtures/indexes.json
Normal file
38
tests/components/gios/fixtures/indexes.json
Normal file
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"AqIndex": {
|
||||
"Identyfikator stacji pomiarowej": 123,
|
||||
"Data wykonania obliczeń indeksu": "2020-07-31 15:10:17",
|
||||
"Nazwa kategorii indeksu": "Dobry",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika st": "2020-07-31 14:00:00",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika SO2": "2020-07-31 15:10:17",
|
||||
"Wartość indeksu dla wskaźnika SO2": 0,
|
||||
"Nazwa kategorii indeksu dla wskażnika SO2": "Bardzo dobry",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika SO2": "2020-07-31 14:00:00",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika NO2": "2020-07-31 14:00:00",
|
||||
"Wartość indeksu dla wskaźnika NO2": 0,
|
||||
"Nazwa kategorii indeksu dla wskażnika NO2": "Dobry",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika NO2": "2020-07-31 14:00:00",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika CO": "2020-07-31 15:10:17",
|
||||
"Wartość indeksu dla wskaźnika CO": 0,
|
||||
"Nazwa kategorii indeksu dla wskażnika CO": "Dobry",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika CO": "2020-07-31 14:00:00",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika PM10": "2020-07-31 15:10:17",
|
||||
"Wartość indeksu dla wskaźnika PM10": 0,
|
||||
"Nazwa kategorii indeksu dla wskażnika PM10": "Dobry",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika PM10": "2020-07-31 14:00:00",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika PM2.5": "2020-07-31 15:10:17",
|
||||
"Wartość indeksu dla wskaźnika PM2.5": 0,
|
||||
"Nazwa kategorii indeksu dla wskażnika PM2.5": "Dobry",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika PM2.5": "2020-07-31 14:00:00",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika O3": "2020-07-31 15:10:17",
|
||||
"Wartość indeksu dla wskaźnika O3": 1,
|
||||
"Nazwa kategorii indeksu dla wskażnika O3": "Dobry",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika O3": "2020-07-31 14:00:00",
|
||||
"Data danych źródłowych, z których policzono wartość indeksu dla wskaźnika C6H6": "2020-07-31 15:10:17",
|
||||
"Wartość indeksu dla wskaźnika C6H6": 0,
|
||||
"Nazwa kategorii indeksu dla wskażnika C6H6": "Bardzo dobry",
|
||||
"Data wykonania obliczeń indeksu dla wskaźnika C6H6": "2020-07-31 14:00:00",
|
||||
"Status indeksu ogólnego dla stacji pomiarowej": true,
|
||||
"Kod zanieczyszczenia krytycznego": "OZON"
|
||||
}
|
||||
}
|
||||
65
tests/components/gios/fixtures/sensors.json
Normal file
65
tests/components/gios/fixtures/sensors.json
Normal file
@@ -0,0 +1,65 @@
|
||||
{
|
||||
"so2": {
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 4.35478 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 4.25478 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 4.34309 }
|
||||
]
|
||||
},
|
||||
"c6h6": {
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 0.23789 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 0.22789 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 0.21315 }
|
||||
]
|
||||
},
|
||||
"co": {
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 251.874 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 250.874 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 251.097 }
|
||||
]
|
||||
},
|
||||
"no": {
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 5.1 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 4.0 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 5.2 }
|
||||
]
|
||||
},
|
||||
"no2": {
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 7.13411 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 7.33411 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 9.32578 }
|
||||
]
|
||||
},
|
||||
"nox": {
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 5.5 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 6.3 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 4.9 }
|
||||
]
|
||||
},
|
||||
"o3": {
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 95.7768 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 93.7768 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 89.4232 }
|
||||
]
|
||||
},
|
||||
"pm2.5": {
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 4 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 4 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 5 }
|
||||
]
|
||||
},
|
||||
"pm10": {
|
||||
"Lista danych pomiarowych": [
|
||||
{ "Data": "2020-07-31 15:00:00", "Wartość": 16.8344 },
|
||||
{ "Data": "2020-07-31 14:00:00", "Wartość": 17.8344 },
|
||||
{ "Data": "2020-07-31 13:00:00", "Wartość": 20.8094 }
|
||||
]
|
||||
}
|
||||
}
|
||||
74
tests/components/gios/fixtures/station.json
Normal file
74
tests/components/gios/fixtures/station.json
Normal file
@@ -0,0 +1,74 @@
|
||||
[
|
||||
{
|
||||
"Identyfikator stanowiska": 672,
|
||||
"Identyfikator stacji": 117,
|
||||
"Wskaźnik": "dwutlenek siarki",
|
||||
"Wskaźnik - wzór": "SO2",
|
||||
"Wskaźnik - kod": "SO2",
|
||||
"Id wskaźnika": 1
|
||||
},
|
||||
{
|
||||
"Identyfikator stanowiska": 658,
|
||||
"Identyfikator stacji": 117,
|
||||
"Wskaźnik": "benzen",
|
||||
"Wskaźnik - wzór": "C6H6",
|
||||
"Wskaźnik - kod": "C6H6",
|
||||
"Id wskaźnika": 10
|
||||
},
|
||||
{
|
||||
"Identyfikator stanowiska": 660,
|
||||
"Identyfikator stacji": 117,
|
||||
"Wskaźnik": "tlenek węgla",
|
||||
"Wskaźnik - wzór": "CO",
|
||||
"Wskaźnik - kod": "CO",
|
||||
"Id wskaźnika": 8
|
||||
},
|
||||
{
|
||||
"Identyfikator stanowiska": 664,
|
||||
"Identyfikator stacji": 117,
|
||||
"Wskaźnik": "tlenek azotu",
|
||||
"Wskaźnik - wzór": "NO",
|
||||
"Wskaźnik - kod": "NO",
|
||||
"Id wskaźnika": 16
|
||||
},
|
||||
{
|
||||
"Identyfikator stanowiska": 665,
|
||||
"Identyfikator stacji": 117,
|
||||
"Wskaźnik": "dwutlenek azotu",
|
||||
"Wskaźnik - wzór": "NO2",
|
||||
"Wskaźnik - kod": "NO2",
|
||||
"Id wskaźnika": 6
|
||||
},
|
||||
{
|
||||
"Identyfikator stanowiska": 666,
|
||||
"Identyfikator stacji": 117,
|
||||
"Wskaźnik": "tlenki azotu",
|
||||
"Wskaźnik - wzór": "NOx",
|
||||
"Wskaźnik - kod": "NOx",
|
||||
"Id wskaźnika": 7
|
||||
},
|
||||
{
|
||||
"Identyfikator stanowiska": 667,
|
||||
"Identyfikator stacji": 117,
|
||||
"Wskaźnik": "ozon",
|
||||
"Wskaźnik - wzór": "O3",
|
||||
"Wskaźnik - kod": "O3",
|
||||
"Id wskaźnika": 5
|
||||
},
|
||||
{
|
||||
"Identyfikator stanowiska": 670,
|
||||
"Identyfikator stacji": 117,
|
||||
"Wskaźnik": "pył zawieszony PM2.5",
|
||||
"Wskaźnik - wzór": "PM2.5",
|
||||
"Wskaźnik - kod": "PM2.5",
|
||||
"Id wskaźnika": 69
|
||||
},
|
||||
{
|
||||
"Identyfikator stanowiska": 14395,
|
||||
"Identyfikator stacji": 117,
|
||||
"Wskaźnik": "pył zawieszony PM10",
|
||||
"Wskaźnik - wzór": "PM10",
|
||||
"Wskaźnik - kod": "PM10",
|
||||
"Id wskaźnika": 3
|
||||
}
|
||||
]
|
||||
@@ -1,93 +1,138 @@
|
||||
"""Define tests for the GIOS config flow."""
|
||||
|
||||
from unittest.mock import MagicMock
|
||||
import json
|
||||
from unittest.mock import patch
|
||||
|
||||
from gios import ApiError, InvalidSensorsDataError
|
||||
import pytest
|
||||
from gios import ApiError
|
||||
|
||||
from homeassistant.components.gios import config_flow
|
||||
from homeassistant.components.gios.const import CONF_STATION_ID, DOMAIN
|
||||
from homeassistant.config_entries import SOURCE_USER
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
|
||||
from . import STATIONS
|
||||
|
||||
from tests.common import async_load_fixture
|
||||
|
||||
CONFIG = {
|
||||
CONF_NAME: "Foo",
|
||||
CONF_STATION_ID: "123",
|
||||
}
|
||||
|
||||
pytestmark = pytest.mark.usefixtures("mock_gios")
|
||||
|
||||
|
||||
async def test_show_form(hass: HomeAssistant) -> None:
|
||||
"""Test that the form is served with no input."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
with patch(
|
||||
"homeassistant.components.gios.coordinator.Gios._get_stations",
|
||||
return_value=STATIONS,
|
||||
):
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "user"
|
||||
assert len(result["data_schema"].schema[CONF_STATION_ID].config["options"]) == 2
|
||||
|
||||
|
||||
async def test_form_with_api_error(hass: HomeAssistant, mock_gios: MagicMock) -> None:
|
||||
async def test_form_with_api_error(hass: HomeAssistant) -> None:
|
||||
"""Test the form is aborted because of API error."""
|
||||
mock_gios.create.side_effect = ApiError("error")
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
with patch(
|
||||
"homeassistant.components.gios.coordinator.Gios._get_stations",
|
||||
side_effect=ApiError("error"),
|
||||
):
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "cannot_connect"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("exception", "errors"),
|
||||
[
|
||||
(
|
||||
InvalidSensorsDataError("Invalid data"),
|
||||
{CONF_STATION_ID: "invalid_sensors_data"},
|
||||
async def test_invalid_sensor_data(hass: HomeAssistant) -> None:
|
||||
"""Test that errors are shown when sensor data is invalid."""
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.gios.coordinator.Gios._get_stations",
|
||||
return_value=STATIONS,
|
||||
),
|
||||
(ApiError("error"), {"base": "cannot_connect"}),
|
||||
],
|
||||
)
|
||||
async def test_form_submission_errors(
|
||||
hass: HomeAssistant, mock_gios: MagicMock, exception, errors
|
||||
) -> None:
|
||||
"""Test errors during form submission."""
|
||||
mock_gios.async_update.side_effect = exception
|
||||
patch(
|
||||
"homeassistant.components.gios.coordinator.Gios._get_station",
|
||||
return_value=json.loads(
|
||||
await async_load_fixture(hass, "station.json", DOMAIN)
|
||||
),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.gios.coordinator.Gios._get_sensor",
|
||||
return_value={},
|
||||
),
|
||||
):
|
||||
flow = config_flow.GiosFlowHandler()
|
||||
flow.hass = hass
|
||||
flow.context = {}
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_USER},
|
||||
)
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input=CONFIG
|
||||
)
|
||||
result = await flow.async_step_user(user_input=CONFIG)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["errors"] == errors
|
||||
mock_gios.async_update.side_effect = None
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input=CONFIG
|
||||
)
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["title"] == "Test Name 1"
|
||||
assert result["errors"] == {CONF_STATION_ID: "invalid_sensors_data"}
|
||||
|
||||
|
||||
async def test_cannot_connect(hass: HomeAssistant) -> None:
|
||||
"""Test that errors are shown when cannot connect to GIOS server."""
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.gios.coordinator.Gios._get_stations",
|
||||
return_value=STATIONS,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.gios.coordinator.Gios._async_get",
|
||||
side_effect=ApiError("error"),
|
||||
),
|
||||
):
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], CONFIG
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result["errors"] == {"base": "cannot_connect"}
|
||||
|
||||
|
||||
async def test_create_entry(hass: HomeAssistant) -> None:
|
||||
"""Test that the user step works."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_USER},
|
||||
)
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input=CONFIG
|
||||
)
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.gios.coordinator.Gios._get_stations",
|
||||
return_value=STATIONS,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.gios.coordinator.Gios._get_station",
|
||||
return_value=json.loads(
await async_load_fixture(hass, "station.json", DOMAIN)
),
),
patch(
"homeassistant.components.gios.coordinator.Gios._get_all_sensors",
return_value=json.loads(
await async_load_fixture(hass, "sensors.json", DOMAIN)
),
),
patch(
"homeassistant.components.gios.coordinator.Gios._get_indexes",
return_value=json.loads(
await async_load_fixture(hass, "indexes.json", DOMAIN)
),
),
):
flow = config_flow.GiosFlowHandler()
flow.hass = hass
flow.context = {}

assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "Test Name 1"
assert result["data"][CONF_STATION_ID] == 123
result = await flow.async_step_user(user_input=CONFIG)

assert result["result"].unique_id == "123"
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "Test Name 1"
assert result["data"][CONF_STATION_ID] == CONFIG[CONF_STATION_ID]

assert flow.context["unique_id"] == "123"

@@ -1,24 +1,24 @@
"""Test GIOS diagnostics."""

import pytest
from syrupy.assertion import SnapshotAssertion
from syrupy.filters import props

from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry
from . import init_integration

from tests.components.diagnostics import get_diagnostics_for_config_entry
from tests.typing import ClientSessionGenerator


@pytest.mark.usefixtures("init_integration")
async def test_entry_diagnostics(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
snapshot: SnapshotAssertion,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test config entry diagnostics."""
assert await get_diagnostics_for_config_entry(
hass, hass_client, mock_config_entry
) == snapshot(exclude=props("created_at", "modified_at"))
entry = await init_integration(hass)

assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot(
exclude=props("created_at", "modified_at")
)

@@ -1,8 +1,7 @@
"""Test init of GIOS integration."""

from unittest.mock import MagicMock

import pytest
import json
from unittest.mock import patch

from homeassistant.components.air_quality import DOMAIN as AIR_QUALITY_PLATFORM
from homeassistant.components.gios.const import DOMAIN
@@ -11,98 +10,108 @@ from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er

from . import setup_integration
from . import STATIONS, init_integration

from tests.common import MockConfigEntry
from tests.common import MockConfigEntry, async_load_fixture


@pytest.mark.usefixtures("init_integration")
async def test_async_setup_entry(
hass: HomeAssistant,
) -> None:
async def test_async_setup_entry(hass: HomeAssistant) -> None:
"""Test a successful setup entry."""
await init_integration(hass)

state = hass.states.get("sensor.home_pm2_5")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == "4"


async def test_config_not_ready(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_gios: MagicMock,
) -> None:
async def test_config_not_ready(hass: HomeAssistant) -> None:
"""Test for setup failure if connection to GIOS is missing."""
mock_gios.create.side_effect = ConnectionError()
entry = MockConfigEntry(
domain=DOMAIN,
title="Home",
unique_id=123,
data={"station_id": 123, "name": "Home"},
)

mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY
with patch(
"homeassistant.components.gios.coordinator.Gios._get_stations",
side_effect=ConnectionError(),
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state is ConfigEntryState.SETUP_RETRY


@pytest.mark.usefixtures("init_integration")
async def test_unload_entry(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
) -> None:
async def test_unload_entry(hass: HomeAssistant) -> None:
"""Test successful unload of entry."""
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert mock_config_entry.state is ConfigEntryState.LOADED
entry = await init_integration(hass)

assert await hass.config_entries.async_unload(mock_config_entry.entry_id)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert entry.state is ConfigEntryState.LOADED

assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()

assert mock_config_entry.state is ConfigEntryState.NOT_LOADED
assert entry.state is ConfigEntryState.NOT_LOADED
assert not hass.data.get(DOMAIN)


async def test_migrate_device_and_config_entry(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
device_registry: dr.DeviceRegistry,
mock_gios: MagicMock,
hass: HomeAssistant, device_registry: dr.DeviceRegistry
) -> None:
"""Test device_info identifiers and config entry migration."""
mock_config_entry.add_to_hass(hass)

device_entry = device_registry.async_get_or_create(
config_entry_id=mock_config_entry.entry_id, identifiers={(DOMAIN, 123)}
config_entry = MockConfigEntry(
domain=DOMAIN,
title="Home",
unique_id=123,
data={
"station_id": 123,
"name": "Home",
},
)

await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
indexes = json.loads(await async_load_fixture(hass, "indexes.json", DOMAIN))
station = json.loads(await async_load_fixture(hass, "station.json", DOMAIN))
sensors = json.loads(await async_load_fixture(hass, "sensors.json", DOMAIN))

migrated_device_entry = device_registry.async_get_or_create(
config_entry_id=mock_config_entry.entry_id, identifiers={(DOMAIN, "123")}
)
assert device_entry.id == migrated_device_entry.id
with (
patch(
"homeassistant.components.gios.coordinator.Gios._get_stations",
return_value=STATIONS,
),
patch(
"homeassistant.components.gios.coordinator.Gios._get_station",
return_value=station,
),
patch(
"homeassistant.components.gios.coordinator.Gios._get_all_sensors",
return_value=sensors,
),
patch(
"homeassistant.components.gios.coordinator.Gios._get_indexes",
return_value=indexes,
),
):
config_entry.add_to_hass(hass)

device_entry = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id, identifiers={(DOMAIN, 123)}
)

async def test_migrate_unique_id_to_str(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_gios: MagicMock,
) -> None:
"""Test device_info identifiers and config entry migration."""
mock_config_entry.add_to_hass(hass)
hass.config_entries.async_update_entry(
mock_config_entry,
unique_id=int(mock_config_entry.unique_id), # type: ignore[misc]
)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()

await setup_integration(hass, mock_config_entry)

assert mock_config_entry.unique_id == "123"
migrated_device_entry = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id, identifiers={(DOMAIN, "123")}
)
assert device_entry.id == migrated_device_entry.id


async def test_remove_air_quality_entities(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
mock_config_entry: MockConfigEntry,
mock_gios: MagicMock,
hass: HomeAssistant, entity_registry: er.EntityRegistry
) -> None:
"""Test remove air_quality entities from registry."""
mock_config_entry.add_to_hass(hass)
entity_registry.async_get_or_create(
AIR_QUALITY_PLATFORM,
DOMAIN,
@@ -111,8 +120,7 @@ async def test_remove_air_quality_entities(
disabled_by=None,
)

await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
await init_integration(hass)

entry = entity_registry.async_get("air_quality.home")
assert entry is None

@@ -1,45 +1,42 @@
"""Test sensor of GIOS integration."""

from collections.abc import Generator
from unittest.mock import MagicMock, patch
from copy import deepcopy
from datetime import timedelta
import json
from unittest.mock import patch

from freezegun.api import FrozenDateTimeFactory
from gios import ApiError
from gios.model import GiosSensors
import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.components.gios.const import DOMAIN, SCAN_INTERVAL
from homeassistant.components.gios.const import DOMAIN
from homeassistant.components.sensor import DOMAIN as PLATFORM
from homeassistant.const import STATE_UNAVAILABLE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.util.dt import utcnow

from . import setup_integration
from . import init_integration

from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
from tests.common import async_fire_time_changed, async_load_fixture, snapshot_platform


@pytest.fixture(autouse=True)
def override_platforms() -> Generator[None]:
"""Override PLATFORMS."""
with patch("homeassistant.components.gios.PLATFORMS", [Platform.SENSOR]):
yield


@pytest.mark.usefixtures("init_integration")
async def test_sensor(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
snapshot: SnapshotAssertion,
mock_config_entry: MockConfigEntry,
hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion
) -> None:
"""Test states of the sensor."""
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
with patch("homeassistant.components.gios.PLATFORMS", [Platform.SENSOR]):
entry = await init_integration(hass)

await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id)


@pytest.mark.usefixtures("init_integration")
async def test_availability(hass: HomeAssistant) -> None:
"""Ensure that we mark the entities unavailable correctly when service causes an error."""
indexes = json.loads(await async_load_fixture(hass, "indexes.json", DOMAIN))
sensors = json.loads(await async_load_fixture(hass, "sensors.json", DOMAIN))

await init_integration(hass)

state = hass.states.get("sensor.home_pm2_5")
assert state
assert state.state == "4"
@@ -52,22 +49,13 @@ async def test_availability(hass: HomeAssistant) -> None:
assert state
assert state.state == "good"


@pytest.mark.usefixtures("init_integration")
async def test_availability_api_error(
hass: HomeAssistant,
mock_gios: MagicMock,
freezer: FrozenDateTimeFactory,
) -> None:
"""Ensure that we mark the entities unavailable correctly when service causes an error."""
state = hass.states.get("sensor.home_pm2_5")
assert state
assert state.state == "4"

mock_gios.async_update.side_effect = ApiError("Unexpected error")
freezer.tick(SCAN_INTERVAL)
async_fire_time_changed(hass)
await hass.async_block_till_done()
future = utcnow() + timedelta(minutes=60)
with patch(
"homeassistant.components.gios.coordinator.Gios._get_all_sensors",
side_effect=ApiError("Unexpected error"),
):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()

state = hass.states.get("sensor.home_pm2_5")
assert state
@@ -81,16 +69,21 @@ async def test_availability_api_error(
assert state
assert state.state == STATE_UNAVAILABLE

mock_gios.async_update.side_effect = None
gios_sensors: GiosSensors = mock_gios.async_update.return_value
old_pm25 = gios_sensors.pm25
old_aqi = gios_sensors.aqi
gios_sensors.pm25 = None
gios_sensors.aqi = None

freezer.tick(SCAN_INTERVAL)
async_fire_time_changed(hass)
await hass.async_block_till_done()
incomplete_sensors = deepcopy(sensors)
incomplete_sensors["pm2.5"] = {}
future = utcnow() + timedelta(minutes=120)
with (
patch(
"homeassistant.components.gios.coordinator.Gios._get_all_sensors",
return_value=incomplete_sensors,
),
patch(
"homeassistant.components.gios.coordinator.Gios._get_indexes",
return_value={},
),
):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()

# There is no PM2.5 data so the state should be unavailable
state = hass.states.get("sensor.home_pm2_5")
@@ -107,12 +100,19 @@ async def test_availability_api_error(
assert state
assert state.state == STATE_UNAVAILABLE

gios_sensors.pm25 = old_pm25
gios_sensors.aqi = old_aqi

freezer.tick(SCAN_INTERVAL)
async_fire_time_changed(hass)
await hass.async_block_till_done()
future = utcnow() + timedelta(minutes=180)
with (
patch(
"homeassistant.components.gios.coordinator.Gios._get_all_sensors",
return_value=sensors,
),
patch(
"homeassistant.components.gios.coordinator.Gios._get_indexes",
return_value=indexes,
),
):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()

state = hass.states.get("sensor.home_pm2_5")
assert state
@@ -127,46 +127,9 @@ async def test_availability_api_error(
assert state.state == "good"


async def test_dont_create_entities_when_data_missing_for_station(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_gios: MagicMock,
freezer: FrozenDateTimeFactory,
entity_registry: er.EntityRegistry,
mock_gios_sensors: GiosSensors,
) -> None:
"""Test that no entities are created when data is missing for the station."""
mock_gios_sensors.co = None
mock_gios_sensors.no = None
mock_gios_sensors.no2 = None
mock_gios_sensors.nox = None
mock_gios_sensors.o3 = None
mock_gios_sensors.pm10 = None
mock_gios_sensors.pm25 = None
mock_gios_sensors.so2 = None
mock_gios_sensors.aqi = None
mock_gios_sensors.c6h6 = None

await setup_integration(hass, mock_config_entry)

assert hass.states.async_entity_ids() == []


async def test_missing_index_data(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_gios: MagicMock,
mock_gios_sensors: GiosSensors,
) -> None:
async def test_invalid_indexes(hass: HomeAssistant) -> None:
"""Test states of the sensor when API returns invalid indexes."""
mock_gios_sensors.no2.index = None
mock_gios_sensors.o3.index = None
mock_gios_sensors.pm10.index = None
mock_gios_sensors.pm25.index = None
mock_gios_sensors.so2.index = None
mock_gios_sensors.aqi = None

await setup_integration(hass, mock_config_entry)
await init_integration(hass, invalid_indexes=True)

state = hass.states.get("sensor.home_nitrogen_dioxide_index")
assert state
@@ -193,21 +156,18 @@ async def test_missing_index_data(


async def test_unique_id_migration(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
mock_gios: MagicMock,
mock_config_entry: MockConfigEntry,
hass: HomeAssistant, entity_registry: er.EntityRegistry
) -> None:
"""Test states of the unique_id migration."""
entity_registry.async_get_or_create(
Platform.SENSOR,
PLATFORM,
DOMAIN,
"123-pm2.5",
suggested_object_id="home_pm2_5",
disabled_by=None,
)

await setup_integration(hass, mock_config_entry)
await init_integration(hass)

entry = entity_registry.async_get("sensor.home_pm2_5")
assert entry

@@ -6,7 +6,6 @@ import time
from typing import Any
from unittest.mock import patch

from freezegun import freeze_time
from gspread.exceptions import APIError
import pytest
from requests.models import Response
@@ -18,11 +17,8 @@ from homeassistant.components.application_credentials import (
)
from homeassistant.components.google_sheets.const import DOMAIN
from homeassistant.components.google_sheets.services import (
ADD_CREATED_COLUMN,
DATA,
DATA_CONFIG_ENTRY,
ROWS,
SERVICE_APPEND_SHEET,
SERVICE_GET_SHEET,
WORKSHEET,
)
@@ -198,24 +194,12 @@ async def test_expired_token_refresh_failure(
assert entries[0].state is expected_state


@pytest.mark.parametrize(
("add_created_column_param", "expected_row"),
[
({ADD_CREATED_COLUMN: True}, ["bar", "2024-01-15 12:30:45.123456"]),
({ADD_CREATED_COLUMN: False}, ["bar", ""]),
({}, ["bar", "2024-01-15 12:30:45.123456"]),
],
ids=["created_column_true", "created_column_false", "created_column_default"],
)
@freeze_time("2024-01-15 12:30:45.123456")
async def test_append_sheet(
hass: HomeAssistant,
setup_integration: ComponentSetup,
config_entry: MockConfigEntry,
add_created_column_param: dict[str, bool],
expected_row: list[str],
) -> None:
"""Test created column behavior based on add_created_column parameter."""
"""Test service call appending to a sheet."""
await setup_integration()

entries = hass.config_entries.async_entries(DOMAIN)
@@ -223,26 +207,17 @@ async def test_append_sheet(
assert entries[0].state is ConfigEntryState.LOADED

with patch("homeassistant.components.google_sheets.services.Client") as mock_client:
mock_worksheet = (
mock_client.return_value.open_by_key.return_value.worksheet.return_value
)
mock_worksheet.get_values.return_value = [["foo", "created"]]

await hass.services.async_call(
DOMAIN,
SERVICE_APPEND_SHEET,
"append_sheet",
{
DATA_CONFIG_ENTRY: config_entry.entry_id,
WORKSHEET: "Sheet1",
DATA: {"foo": "bar"},
**add_created_column_param,
"config_entry": config_entry.entry_id,
"worksheet": "Sheet1",
"data": {"foo": "bar"},
},
blocking=True,
)

mock_worksheet.append_rows.assert_called_once()
rows_data = mock_worksheet.append_rows.call_args[0][0]
assert rows_data[0] == expected_row
assert len(mock_client.mock_calls) == 8


async def test_get_sheet(

@@ -10,8 +10,6 @@ from homeassistant.components.icloud.const import (
)
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME

FIRST_NAME = "user"
LAST_NAME = "name"
USERNAME = "username@me.com"
USERNAME_2 = "second_username@icloud.com"
PASSWORD = "password"
@@ -20,30 +18,6 @@ WITH_FAMILY = True
MAX_INTERVAL = 15
GPS_ACCURACY_THRESHOLD = 250

MEMBER_1_FIRST_NAME = "John"
MEMBER_1_LAST_NAME = "TRAVOLTA"
MEMBER_1_FULL_NAME = MEMBER_1_FIRST_NAME + " " + MEMBER_1_LAST_NAME
MEMBER_1_PERSON_ID = (MEMBER_1_FIRST_NAME + MEMBER_1_LAST_NAME).lower()
MEMBER_1_APPLE_ID = MEMBER_1_PERSON_ID + "@icloud.com"

USER_INFO = {
"accountFormatter": 0,
"firstName": FIRST_NAME,
"lastName": LAST_NAME,
"membersInfo": {
MEMBER_1_PERSON_ID: {
"accountFormatter": 0,
"firstName": MEMBER_1_FIRST_NAME,
"lastName": MEMBER_1_LAST_NAME,
"deviceFetchStatus": "DONE",
"useAuthWidget": True,
"isHSA": True,
"appleId": MEMBER_1_APPLE_ID,
}
},
"hasMembers": True,
}

MOCK_CONFIG = {
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
@@ -55,17 +29,3 @@ MOCK_CONFIG = {
TRUSTED_DEVICES = [
{"deviceType": "SMS", "areaCode": "", "phoneNumber": "*******58", "deviceId": "1"}
]

DEVICE = {
"id": "device1",
"name": "iPhone",
"deviceStatus": "200",
"batteryStatus": "NotCharging",
"batteryLevel": 0.8,
"rawDeviceModel": "iPhone14,2",
"deviceClass": "iPhone",
"deviceDisplayName": "iPhone",
"prsId": None,
"lowPowerMode": False,
"location": None,
}

@@ -1,167 +0,0 @@
"""Tests for the iCloud account."""

from unittest.mock import MagicMock, Mock, patch

import pytest

from homeassistant.components.icloud.account import IcloudAccount
from homeassistant.components.icloud.const import (
CONF_GPS_ACCURACY_THRESHOLD,
CONF_MAX_INTERVAL,
CONF_WITH_FAMILY,
DOMAIN,
)
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.storage import Store

from .const import DEVICE, MOCK_CONFIG, USER_INFO, USERNAME

from tests.common import MockConfigEntry


@pytest.fixture(name="mock_store")
def mock_store_fixture():
"""Mock the storage."""
with patch("homeassistant.components.icloud.account.Store") as store_mock:
store_instance = Mock(spec=Store)
store_instance.path = "/mock/path"
store_mock.return_value = store_instance
yield store_instance


@pytest.fixture(name="mock_icloud_service_no_userinfo")
def mock_icloud_service_no_userinfo_fixture():
"""Mock PyiCloudService with devices as dict but no userInfo."""
with patch(
"homeassistant.components.icloud.account.PyiCloudService"
) as service_mock:
service_instance = MagicMock()
service_instance.requires_2fa = False
mock_device = MagicMock()
mock_device.status = iter(DEVICE)
mock_device.user_info = None
service_instance.devices = mock_device
service_mock.return_value = service_instance
yield service_instance


async def test_setup_fails_when_userinfo_missing(
hass: HomeAssistant,
mock_store: Mock,
mock_icloud_service_no_userinfo: MagicMock,
) -> None:
"""Test setup fails when userInfo is missing from devices dict."""

assert mock_icloud_service_no_userinfo is not None

config_entry = MockConfigEntry(
domain=DOMAIN, data=MOCK_CONFIG, entry_id="test", unique_id=USERNAME
)
config_entry.add_to_hass(hass)

account = IcloudAccount(
hass,
MOCK_CONFIG[CONF_USERNAME],
MOCK_CONFIG[CONF_PASSWORD],
mock_store,
MOCK_CONFIG[CONF_WITH_FAMILY],
MOCK_CONFIG[CONF_MAX_INTERVAL],
MOCK_CONFIG[CONF_GPS_ACCURACY_THRESHOLD],
config_entry,
)

with pytest.raises(ConfigEntryNotReady, match="No user info found"):
account.setup()


class MockAppleDevice:
"""Mock "Apple device" which implements the .status(...) method used by the account."""

def __init__(self, status_dict) -> None:
"""Set status."""
self._status = status_dict

def status(self, key):
"""Return current status."""
return self._status

def __getitem__(self, key):
"""Allow indexing the device itself (device[KEY]) to proxy into the raw status dict."""
return self._status.get(key)


class MockDevicesContainer:
"""Mock devices container which is iterable and indexable returning device status dicts."""

def __init__(self, userinfo, devices) -> None:
"""Initialize with userinfo and list of device objects."""
self.user_info = userinfo
self._devices = devices

def __iter__(self):
"""Iterate returns device objects (each must have .status(...))."""
return iter(self._devices)

def __len__(self):
"""Return number of devices."""
return len(self._devices)

def __getitem__(self, idx):
"""Indexing returns device object (which must have .status(...))."""
dev = self._devices[idx]
if hasattr(dev, "status"):
return dev.status(None)
return dev


@pytest.fixture(name="mock_icloud_service")
def mock_icloud_service_fixture():
"""Mock PyiCloudService with devices container that is iterable and indexable returning status dict."""
with patch(
"homeassistant.components.icloud.account.PyiCloudService",
) as service_mock:
service_instance = MagicMock()
device_obj = MockAppleDevice(DEVICE)
devices_container = MockDevicesContainer(USER_INFO, [device_obj])

service_instance.devices = devices_container
service_instance.requires_2fa = False

service_mock.return_value = service_instance
yield service_instance


async def test_setup_success_with_devices(
hass: HomeAssistant,
mock_store: Mock,
mock_icloud_service: MagicMock,
) -> None:
"""Test successful setup with devices."""

assert mock_icloud_service is not None

config_entry = MockConfigEntry(
domain=DOMAIN, data=MOCK_CONFIG, entry_id="test", unique_id=USERNAME
)
config_entry.add_to_hass(hass)

account = IcloudAccount(
hass,
MOCK_CONFIG[CONF_USERNAME],
MOCK_CONFIG[CONF_PASSWORD],
mock_store,
MOCK_CONFIG[CONF_WITH_FAMILY],
MOCK_CONFIG[CONF_MAX_INTERVAL],
MOCK_CONFIG[CONF_GPS_ACCURACY_THRESHOLD],
config_entry,
)

with patch.object(account, "_schedule_next_fetch"):
account.setup()

assert account.api is not None
assert account.owner_fullname == "user name"
assert "johntravolta" in account.family_members_fullname
assert account.family_members_fullname["johntravolta"] == "John TRAVOLTA"
@@ -2,67 +2,18 @@

from __future__ import annotations

from collections.abc import Generator, Iterable
import copy
from unittest.mock import patch
from collections.abc import Generator
from unittest.mock import AsyncMock, MagicMock, patch

from pykoplenti import ExtendedApiClient, MeData, SettingsData, VersionData
from pykoplenti import MeData, VersionData
import pytest

from homeassistant.components.kostal_plenticore.coordinator import Plenticore
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo

from tests.common import MockConfigEntry

DEFAULT_SETTING_VALUES = {
"devices:local": {
"Properties:StringCnt": "2",
"Properties:String0Features": "1",
"Properties:String1Features": "1",
"Properties:SerialNo": "42",
"Branding:ProductName1": "PLENTICORE",
"Branding:ProductName2": "plus 10",
"Properties:VersionIOC": "01.45",
"Properties:VersionMC": "01.46",
"Battery:MinSoc": "5",
"Battery:MinHomeComsumption": "50",
},
"scb:network": {"Hostname": "scb"},
}

DEFAULT_SETTINGS = {
"devices:local": [
SettingsData(
min="5",
max="100",
default=None,
access="readwrite",
unit="%",
id="Battery:MinSoc",
type="byte",
),
SettingsData(
min="50",
max="38000",
default=None,
access="readwrite",
unit="W",
id="Battery:MinHomeComsumption",
type="byte",
),
],
"scb:network": [
SettingsData(
min="1",
max="63",
default=None,
access="readwrite",
unit=None,
id="Hostname",
type="string",
)
],
}


@pytest.fixture
def mock_config_entry() -> MockConfigEntry:
@@ -91,67 +42,37 @@ def mock_installer_config_entry() -> MockConfigEntry:


@pytest.fixture
def mock_get_settings() -> dict[str, list[SettingsData]]:
"""Add setting data to mock_plenticore_client.

Returns a dictionary with setting data which can be mutated by test cases.
"""
return copy.deepcopy(DEFAULT_SETTINGS)


@pytest.fixture
def mock_get_setting_values() -> dict[str, dict[str, str]]:
"""Add setting values to mock_plenticore_client.

Returns a dictionary with setting values which can be mutated by test cases.
"""
# Add default settings values - this values are always retrieved by the integration on startup
return copy.deepcopy(DEFAULT_SETTING_VALUES)


@pytest.fixture
def mock_plenticore_client(
mock_get_settings: dict[str, list[SettingsData]],
mock_get_setting_values: dict[str, dict[str, str]],
) -> Generator[ExtendedApiClient]:
"""Return a patched ExtendedApiClient."""
def mock_plenticore() -> Generator[Plenticore]:
"""Set up a Plenticore mock with some default values."""
with patch(
"homeassistant.components.kostal_plenticore.coordinator.ExtendedApiClient",
autospec=True,
) as plenticore_client_class:
"homeassistant.components.kostal_plenticore.Plenticore", autospec=True
) as mock_api_class:
# setup
plenticore = mock_api_class.return_value
plenticore.async_setup = AsyncMock()
plenticore.async_setup.return_value = True

def default_settings_data(*args):
# the get_setting_values method can be called with different argument types and numbers
match args:
case (str() as module_id, str() as data_id):
request = {module_id: [data_id]}
case (str() as module_id, Iterable() as data_ids):
request = {module_id: data_ids}
case ({},):
request = args[0]
case _:
raise NotImplementedError
plenticore.device_info = DeviceInfo(
configuration_url="http://192.168.1.2",
identifiers={("kostal_plenticore", "12345")},
manufacturer="Kostal",
model="PLENTICORE plus 10",
name="scb",
sw_version="IOC: 01.45 MC: 01.46",
)

result = {}
for module_id, data_ids in request.items():
if (values := mock_get_setting_values.get(module_id)) is not None:
result[module_id] = {}
for data_id in data_ids:
if data_id in values:
result[module_id][data_id] = values[data_id]
else:
raise ValueError(
f"Missing data_id {data_id} in module {module_id}"
)
else:
raise ValueError(f"Missing module_id {module_id}")
plenticore.client = MagicMock()

return result
plenticore.client.get_version = AsyncMock()
plenticore.client.get_version.return_value = VersionData(
api_version="0.2.0",
hostname="scb",
name="PUCK RESTful API",
sw_version="01.16.05025",
)

client = plenticore_client_class.return_value
client.get_setting_values.side_effect = default_settings_data
client.get_settings.return_value = mock_get_settings
client.get_me.return_value = MeData(
plenticore.client.get_me = AsyncMock()
plenticore.client.get_me.return_value = MeData(
locked=False,
active=True,
authenticated=True,
@@ -159,14 +80,11 @@ def mock_plenticore_client(
anonymous=False,
role="USER",
)
client.get_version.return_value = VersionData(
api_version="0.2.0",
hostname="scb",
name="PUCK RESTful API",
sw_version="01.16.05025",
)

yield client
plenticore.client.get_process_data = AsyncMock()
plenticore.client.get_settings = AsyncMock()

yield plenticore


@pytest.fixture

@@ -1,8 +1,9 @@
"""Test Kostal Plenticore diagnostics."""

from unittest.mock import Mock
from pykoplenti import SettingsData

from homeassistant.components.diagnostics import REDACTED
from homeassistant.components.kostal_plenticore.coordinator import Plenticore
from homeassistant.core import HomeAssistant

from tests.common import ANY, MockConfigEntry
@@ -13,16 +14,30 @@ from tests.typing import ClientSessionGenerator
async def test_entry_diagnostics(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
mock_plenticore_client: Mock,
mock_plenticore: Plenticore,
init_integration: MockConfigEntry,
) -> None:
"""Test config entry diagnostics."""

# set some test process data for the diagnostics output
mock_plenticore_client.get_process_data.return_value = {
# set some test process and settings data for the diagnostics output
mock_plenticore.client.get_process_data.return_value = {
"devices:local": ["HomeGrid_P", "HomePv_P"]
}

mock_plenticore.client.get_settings.return_value = {
"devices:local": [
SettingsData(
min="5",
max="100",
default=None,
access="readwrite",
unit="%",
id="Battery:MinSoc",
type="byte",
)
]
}

assert await get_diagnostics_for_config_entry(
hass, hass_client, init_integration
) == {
@@ -50,19 +65,8 @@ async def test_entry_diagnostics(
"available_process_data": {"devices:local": ["HomeGrid_P", "HomePv_P"]},
"available_settings_data": {
"devices:local": [
"min='5' max='100' default=None access='readwrite' unit='%' id='Battery:MinSoc' type='byte'",
"min='50' max='38000' default=None access='readwrite' unit='W' id='Battery:MinHomeComsumption' type='byte'",
],
"scb:network": [
"min='1' max='63' default=None access='readwrite' unit=None id='Hostname' type='string'"
],
},
},
"configuration": {
"devices:local": {
"Properties:StringCnt": "2",
"Properties:String0Features": "1",
"Properties:String1Features": "1",
"min='5' max='100' default=None access='readwrite' unit='%' id='Battery:MinSoc' type='byte'"
]
},
},
"device": {
@@ -74,28 +78,3 @@ async def test_entry_diagnostics(
"sw_version": "IOC: 01.45 MC: 01.46",
},
}


async def test_entry_diagnostics_invalid_string_count(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
mock_plenticore_client: Mock,
mock_get_setting_values: Mock,
init_integration: MockConfigEntry,
) -> None:
"""Test config entry diagnostics if string count is invalid."""

# set some test process data for the diagnostics output
mock_plenticore_client.get_process_data.return_value = {
"devices:local": ["HomeGrid_P", "HomePv_P"]
}

mock_get_setting_values["devices:local"]["Properties:StringCnt"] = "invalid"

diagnostic_data = await get_diagnostics_for_config_entry(
hass, hass_client, init_integration
)

assert diagnostic_data["configuration"] == {
"devices:local": {"Properties:StringCnt": "invalid"}
}

@@ -1,6 +1,8 @@
"""Test Kostal Plenticore number."""

from collections.abc import Generator
from datetime import timedelta
from unittest.mock import patch

from pykoplenti import ApiClient, SettingsData
import pytest
@@ -19,9 +21,75 @@ from homeassistant.util import dt as dt_util

from tests.common import MockConfigEntry, async_fire_time_changed

pytestmark = [
pytest.mark.usefixtures("mock_plenticore_client"),
]

@pytest.fixture
def mock_plenticore_client() -> Generator[ApiClient]:
"""Return a patched ExtendedApiClient."""
with patch(
"homeassistant.components.kostal_plenticore.coordinator.ExtendedApiClient",
autospec=True,
) as plenticore_client_class:
yield plenticore_client_class.return_value


@pytest.fixture
def mock_get_setting_values(mock_plenticore_client: ApiClient) -> list:
"""Add a setting value to the given Plenticore client.

Returns a list with setting values which can be extended by test cases.
"""

mock_plenticore_client.get_settings.return_value = {
"devices:local": [
SettingsData(
min="5",
max="100",
default=None,
access="readwrite",
unit="%",
id="Battery:MinSoc",
type="byte",
),
SettingsData(
min="50",
max="38000",
default=None,
access="readwrite",
unit="W",
id="Battery:MinHomeComsumption",
type="byte",
),
],
"scb:network": [
SettingsData(
min="1",
max="63",
default=None,
access="readwrite",
unit=None,
id="Hostname",
type="string",
)
],
}

# this values are always retrieved by the integration on startup
setting_values = [
{
"devices:local": {
"Properties:SerialNo": "42",
"Branding:ProductName1": "PLENTICORE",
"Branding:ProductName2": "plus 10",
"Properties:VersionIOC": "01.45",
"Properties:VersionMC": " 01.46",
},
"scb:network": {"Hostname": "scb"},
}
]

mock_plenticore_client.get_setting_values.side_effect = setting_values

return setting_values


@pytest.mark.usefixtures("entity_registry_enabled_by_default")
@@ -29,6 +97,8 @@ async def test_setup_all_entries(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
mock_config_entry: MockConfigEntry,
mock_plenticore_client: ApiClient,
mock_get_setting_values: list,
) -> None:
"""Test if all available entries are setup."""

@@ -48,27 +118,25 @@ async def test_setup_no_entries(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
mock_config_entry: MockConfigEntry,
mock_get_settings: dict[str, list[SettingsData]],
mock_plenticore_client: ApiClient,
mock_get_setting_values: list,
) -> None:
"""Test that no entries are setup if Plenticore does not provide data."""

# remove all settings except hostname which is used during setup
mock_get_settings.clear()
mock_get_settings.update(
{
"scb:network": [
SettingsData(
min="1",
max="63",
default=None,
access="readwrite",
unit=None,
id="Hostname",
type="string",
)
]
}
)
mock_plenticore_client.get_settings.return_value = {
"scb:network": [
SettingsData(
min="1",
max="63",
default=None,
access="readwrite",
unit=None,
id="Hostname",
type="string",
)
],
}

mock_config_entry.add_to_hass(hass)

@@ -83,11 +151,12 @@ async def test_setup_no_entries(
async def test_number_has_value(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_get_setting_values: dict[str, dict[str, str]],
mock_plenticore_client: ApiClient,
mock_get_setting_values: list,
) -> None:
"""Test if number has a value if data is provided on update."""

mock_get_setting_values["devices:local"]["Battery:MinSoc"] = "42"
mock_get_setting_values.append({"devices:local": {"Battery:MinSoc": "42"}})

mock_config_entry.add_to_hass(hass)

@@ -107,12 +176,11 @@ async def test_number_has_value(
async def test_number_is_unavailable(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_get_setting_values: dict[str, dict[str, str]],
mock_plenticore_client: ApiClient,
mock_get_setting_values: list,
) -> None:
"""Test if number is unavailable if no data is provided on update."""

del mock_get_setting_values["devices:local"]["Battery:MinSoc"]

mock_config_entry.add_to_hass(hass)

await hass.config_entries.async_setup(mock_config_entry.entry_id)
@@ -130,11 +198,11 @@ async def test_set_value(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_plenticore_client: ApiClient,
mock_get_setting_values: dict[str, dict[str, str]],
mock_get_setting_values: list,
) -> None:
"""Test if a new value could be set."""

mock_get_setting_values["devices:local"]["Battery:MinSoc"] = "42"
mock_get_setting_values.append({"devices:local": {"Battery:MinSoc": "42"}})

mock_config_entry.add_to_hass(hass)


@@ -1,28 +1,24 @@
"""Test the Kostal Plenticore Solar Inverter select platform."""

from pykoplenti import SettingsData
import pytest

from homeassistant.components.kostal_plenticore.coordinator import Plenticore
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from tests.common import MockConfigEntry

pytestmark = [
pytest.mark.usefixtures("mock_plenticore_client"),
]


async def test_select_battery_charging_usage_available(
hass: HomeAssistant,
mock_plenticore: Plenticore,
mock_config_entry: MockConfigEntry,
entity_registry: er.EntityRegistry,
mock_get_settings: dict[str, list[SettingsData]],
) -> None:
"""Test that the battery charging usage select entity is added if the settings are available."""

mock_get_settings["devices:local"].extend(
[
mock_plenticore.client.get_settings.return_value = {
"devices:local": [
SettingsData(
min=None,
max=None,
@@ -42,7 +38,7 @@ async def test_select_battery_charging_usage_available(
type="string",
),
]
)
}

mock_config_entry.add_to_hass(hass)

@@ -51,63 +47,10 @@ async def test_select_battery_charging_usage_available(

assert entity_registry.async_is_registered("select.battery_charging_usage_mode")

entity = entity_registry.async_get("select.battery_charging_usage_mode")
assert entity.capabilities.get("options") == [
"None",
"Battery:SmartBatteryControl:Enable",
"Battery:TimeControl:Enable",
]


async def test_select_battery_charging_usage_excess_energy_available(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
entity_registry: er.EntityRegistry,
mock_get_settings: dict[str, list[SettingsData]],
mock_get_setting_values: dict[str, dict[str, str]],
) -> None:
"""Test that the battery charging usage select entity contains the option for excess AC energy."""

mock_get_settings["devices:local"].extend(
[
SettingsData(
min=None,
max=None,
default=None,
access="readwrite",
unit=None,
id="Battery:SmartBatteryControl:Enable",
type="string",
),
SettingsData(
min=None,
max=None,
default=None,
access="readwrite",
unit=None,
id="Battery:TimeControl:Enable",
type="string",
),
]
)

mock_config_entry.add_to_hass(hass)

await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()

assert entity_registry.async_is_registered("select.battery_charging_usage_mode")

entity = entity_registry.async_get("select.battery_charging_usage_mode")
assert entity.capabilities.get("options") == [
"None",
"Battery:SmartBatteryControl:Enable",
"Battery:TimeControl:Enable",
]


async def test_select_battery_charging_usage_not_available(
hass: HomeAssistant,
mock_plenticore: Plenticore,
mock_config_entry: MockConfigEntry,
entity_registry: er.EntityRegistry,
) -> None:

@@ -1,52 +1,35 @@
"""Test the Kostal Plenticore Solar Inverter switch platform."""

from datetime import timedelta
from unittest.mock import Mock

from pykoplenti import SettingsData
import pytest

from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.components.kostal_plenticore.coordinator import Plenticore
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.util import dt as dt_util

from tests.common import MockConfigEntry, async_fire_time_changed

pytestmark = [
pytest.mark.usefixtures("mock_plenticore_client"),
]
from tests.common import MockConfigEntry


async def test_installer_setting_not_available(
hass: HomeAssistant,
mock_get_settings: dict[str, list[SettingsData]],
mock_plenticore: Plenticore,
mock_config_entry: MockConfigEntry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test that the manual charge setting is not available when not using the installer login."""
mock_get_settings.update(
{
"devices:local": [
SettingsData(
min=None,
max=None,
default=None,
access="readwrite",
unit=None,
id="Battery:ManualCharge",
type="bool",
)
]
}
)

mock_plenticore.client.get_settings.return_value = {
"devices:local": [
SettingsData(
min=None,
max=None,
default=None,
access="readwrite",
unit=None,
id="Battery:ManualCharge",
type="bool",
)
]
}

mock_config_entry.add_to_hass(hass)

@@ -58,26 +41,25 @@ async def test_installer_setting_not_available(

async def test_installer_setting_available(
hass: HomeAssistant,
mock_get_settings: dict[str, list[SettingsData]],
mock_plenticore: Plenticore,
mock_installer_config_entry: MockConfigEntry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test that the manual charge setting is available when using the installer login."""
mock_get_settings.update(
{
"devices:local": [
SettingsData(
min=None,
max=None,
default=None,
access="readwrite",
unit=None,
id="Battery:ManualCharge",
type="bool",
)
]
}
)

mock_plenticore.client.get_settings.return_value = {
"devices:local": [
SettingsData(
min=None,
max=None,
default=None,
access="readwrite",
unit=None,
id="Battery:ManualCharge",
type="bool",
)
]
}

mock_installer_config_entry.add_to_hass(hass)

@@ -85,112 +67,3 @@ async def test_installer_setting_available(
await hass.async_block_till_done()

assert entity_registry.async_is_registered("switch.scb_battery_manual_charge")


async def test_invalid_string_count_value(
hass: HomeAssistant,
mock_get_setting_values: dict[str, dict[str, str]],
mock_config_entry: MockConfigEntry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test that an invalid string count value is handled correctly."""
mock_get_setting_values["devices:local"].update({"Properties:StringCnt": "invalid"})

mock_config_entry.add_to_hass(hass)

await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()

# ensure no shadow management switch entities were registered
assert [
name
for name, _ in entity_registry.entities.items()
if name.startswith("switch.scb_shadow_management_dc_string_")
] == []


@pytest.mark.parametrize(
("shadow_mgmt", "string"),
[
("0", (STATE_OFF, STATE_OFF)),
("1", (STATE_ON, STATE_OFF)),
("2", (STATE_OFF, STATE_ON)),
("3", (STATE_ON, STATE_ON)),
],
)
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_shadow_management_switch_state(
hass: HomeAssistant,
mock_get_setting_values: dict[str, dict[str, str]],
mock_config_entry: MockConfigEntry,
shadow_mgmt: str,
string: tuple[str, str],
) -> None:
"""Test that the state of the shadow management switch is correct."""
mock_get_setting_values["devices:local"].update(
{"Properties:StringCnt": "2", "Generator:ShadowMgmt:Enable": shadow_mgmt}
)
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()

async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=300))
await hass.async_block_till_done(wait_background_tasks=True)

state = hass.states.get("switch.scb_shadow_management_dc_string_1")
assert state is not None
assert state.state == string[0]

state = hass.states.get("switch.scb_shadow_management_dc_string_2")
assert state is not None
assert state.state == string[1]


@pytest.mark.parametrize(
("initial_shadow_mgmt", "dc_string", "service", "shadow_mgmt"),
[
("0", 1, SERVICE_TURN_ON, "1"),
("0", 2, SERVICE_TURN_ON, "2"),
("2", 1, SERVICE_TURN_ON, "3"),
("1", 2, SERVICE_TURN_ON, "3"),
("1", 1, SERVICE_TURN_OFF, "0"),
("2", 2, SERVICE_TURN_OFF, "0"),
("3", 1, SERVICE_TURN_OFF, "2"),
("3", 2, SERVICE_TURN_OFF, "1"),
],
)
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_shadow_management_switch_action(
hass: HomeAssistant,
mock_get_setting_values: dict[str, dict[str, str]],
mock_plenticore_client: Mock,
mock_config_entry: MockConfigEntry,
initial_shadow_mgmt: str,
dc_string: int,
service: str,
shadow_mgmt: str,
) -> None:
"""Test that the shadow management can be switch on/off."""
mock_get_setting_values["devices:local"].update(
{
"Properties:StringCnt": "2",
"Generator:ShadowMgmt:Enable": initial_shadow_mgmt,
}
)
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()

async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=300))
await hass.async_block_till_done(wait_background_tasks=True)

await hass.services.async_call(
SWITCH_DOMAIN,
service,
target={ATTR_ENTITY_ID: f"switch.scb_shadow_management_dc_string_{dc_string}"},
blocking=True,
)

mock_plenticore_client.set_setting_values.assert_called_with(
"devices:local", {"Generator:ShadowMgmt:Enable": shadow_mgmt}
)

@@ -5,8 +5,8 @@ from typing import Any
|
||||
from unittest.mock import AsyncMock, Mock, patch
|
||||
|
||||
import pypck
|
||||
from pypck import lcn_defs
|
||||
from pypck.module import GroupConnection, ModuleConnection, Serials
|
||||
import pypck.module
|
||||
from pypck.module import GroupConnection, ModuleConnection
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.lcn import PchkConnectionManager
|
||||
@@ -25,28 +25,16 @@ LATEST_CONFIG_ENTRY_VERSION = (LcnFlowHandler.VERSION, LcnFlowHandler.MINOR_VERS
|
||||
class MockModuleConnection(ModuleConnection):
|
||||
"""Fake a LCN module connection."""
|
||||
|
||||
status_request_handler = AsyncMock()
|
||||
activate_status_request_handler = AsyncMock()
|
||||
cancel_status_request_handler = AsyncMock()
|
||||
request_name = AsyncMock(return_value="TestModule")
|
||||
request_serials = AsyncMock(
|
||||
return_value=Serials(
|
||||
hardware_serial=0x1A20A1234,
|
||||
manu=0x01,
|
||||
software_serial=0x190B11,
|
||||
hardware_type=lcn_defs.HardwareType.UPP,
|
||||
)
|
||||
)
|
||||
send_command = AsyncMock(return_value=True)
|
||||
request_status_output = AsyncMock()
|
||||
request_status_relays = AsyncMock()
|
||||
request_status_motor_position = AsyncMock()
|
||||
request_status_binary_sensors = AsyncMock()
|
||||
request_status_variable = AsyncMock()
|
||||
request_status_led_and_logic_ops = AsyncMock()
|
||||
request_status_locked_keys = AsyncMock()
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
"""Construct ModuleConnection instance."""
|
||||
super().__init__(*args, **kwargs)
|
||||
self._serials_known.set()
|
||||
self.serials_request_handler.serial_known.set()
|
||||
|
||||
|
||||
class MockGroupConnection(GroupConnection):
|
||||
@@ -67,10 +55,14 @@ class MockPchkConnectionManager(PchkConnectionManager):
|
||||
async def async_close(self) -> None:
|
||||
"""Mock closing a connection to PCHK."""
|
||||
|
||||
def get_address_conn(self, addr, request_serials=False):
|
||||
"""Get LCN address connection."""
|
||||
return super().get_address_conn(addr, request_serials)
|
||||
|
||||
@patch.object(pypck.connection, "ModuleConnection", MockModuleConnection)
|
||||
def get_module_conn(self, addr):
|
||||
def get_module_conn(self, addr, request_serials=False):
|
||||
"""Get LCN module connection."""
|
||||
return super().get_module_conn(addr)
|
||||
return super().get_module_conn(addr, request_serials)
|
||||
|
||||
@patch.object(pypck.connection, "GroupConnection", MockGroupConnection)
|
||||
def get_group_conn(self, addr):
|
||||
|
||||
@@ -46,7 +46,7 @@
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'closed',
|
||||
'state': 'open',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup_lcn_cover[cover.testmodule_cover_relays-entry]
|
||||
@@ -96,7 +96,7 @@
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'closed',
|
||||
'state': 'open',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup_lcn_cover[cover.testmodule_cover_relays_bs4-entry]
|
||||
@@ -146,7 +146,7 @@
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'closed',
|
||||
'state': 'open',
|
||||
})
|
||||
# ---
|
||||
# name: test_setup_lcn_cover[cover.testmodule_cover_relays_module-entry]
|
||||
@@ -196,6 +196,6 @@
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'closed',
|
||||
'state': 'open',
|
||||
})
|
||||
# ---
|
||||
|
||||
@@ -9,10 +9,10 @@
|
||||
7,
|
||||
False,
|
||||
),
|
||||
'hardware_serial': 7013536308,
|
||||
'hardware_type': 11,
|
||||
'hardware_serial': -1,
|
||||
'hardware_type': -1,
|
||||
'name': 'TestModule',
|
||||
'software_serial': 1641233,
|
||||
'software_serial': -1,
|
||||
}),
|
||||
]),
|
||||
'dim_mode': 'STEPS200',
|
||||
@@ -50,10 +50,10 @@
|
||||
7,
|
||||
False,
|
||||
),
|
||||
'hardware_serial': 7013536308,
|
||||
'hardware_type': 11,
|
||||
'hardware_serial': -1,
|
||||
'hardware_type': -1,
|
||||
'name': 'TestModule',
|
||||
'software_serial': 1641233,
|
||||
'software_serial': -1,
|
||||
}),
|
||||
]),
|
||||
'dim_mode': 'STEPS200',
|
||||
@@ -143,10 +143,10 @@
|
||||
7,
|
||||
False,
|
||||
),
|
||||
'hardware_serial': 7013536308,
|
||||
'hardware_type': 11,
|
||||
'hardware_serial': -1,
|
||||
'hardware_type': -1,
|
||||
'name': 'TestModule',
|
||||
'software_serial': 1641233,
|
||||
'software_serial': -1,
|
||||
}),
|
||||
]),
|
||||
'dim_mode': 'STEPS200',
|
||||
|
||||
@@ -52,15 +52,8 @@ async def test_set_hvac_mode_heat(hass: HomeAssistant, entry: MockConfigEntry) -
|
||||
await init_integration(hass, entry)
|
||||
|
||||
with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator:
|
||||
await hass.services.async_call(
|
||||
DOMAIN_CLIMATE,
|
||||
SERVICE_SET_HVAC_MODE,
|
||||
{
|
||||
ATTR_ENTITY_ID: "climate.testmodule_climate1",
|
||||
ATTR_HVAC_MODE: HVACMode.OFF,
|
||||
},
|
||||
blocking=True,
|
||||
)
|
||||
state = hass.states.get("climate.testmodule_climate1")
|
||||
state.state = HVACMode.OFF
|
||||
|
||||
# command failed
|
||||
lock_regulator.return_value = False
|
||||
|
||||

@@ -63,8 +63,7 @@ async def test_outputs_open(hass: HomeAssistant, entry: MockConfigEntry) -> None
MockModuleConnection, "control_motor_outputs"
) as control_motor_outputs:
state = hass.states.get(COVER_OUTPUTS)
assert state is not None
assert state.state == CoverState.CLOSED
state.state = CoverState.CLOSED

# command failed
control_motor_outputs.return_value = False
@@ -111,12 +110,8 @@ async def test_outputs_close(hass: HomeAssistant, entry: MockConfigEntry) -> Non
with patch.object(
MockModuleConnection, "control_motor_outputs"
) as control_motor_outputs:
await hass.services.async_call(
DOMAIN_COVER,
SERVICE_OPEN_COVER,
{ATTR_ENTITY_ID: COVER_OUTPUTS},
blocking=True,
)
state = hass.states.get(COVER_OUTPUTS)
state.state = CoverState.OPEN

# command failed
control_motor_outputs.return_value = False
@@ -163,12 +158,8 @@ async def test_outputs_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None
with patch.object(
MockModuleConnection, "control_motor_outputs"
) as control_motor_outputs:
await hass.services.async_call(
DOMAIN_COVER,
SERVICE_CLOSE_COVER,
{ATTR_ENTITY_ID: COVER_OUTPUTS},
blocking=True,
)
state = hass.states.get(COVER_OUTPUTS)
state.state = CoverState.CLOSING

# command failed
control_motor_outputs.return_value = False
@@ -212,8 +203,7 @@ async def test_relays_open(hass: HomeAssistant, entry: MockConfigEntry) -> None:
MockModuleConnection, "control_motor_relays"
) as control_motor_relays:
state = hass.states.get(COVER_RELAYS)
assert state is not None
assert state.state == CoverState.CLOSED
state.state = CoverState.CLOSED

# command failed
control_motor_relays.return_value = False
@@ -260,12 +250,8 @@ async def test_relays_close(hass: HomeAssistant, entry: MockConfigEntry) -> None
with patch.object(
MockModuleConnection, "control_motor_relays"
) as control_motor_relays:
await hass.services.async_call(
DOMAIN_COVER,
SERVICE_OPEN_COVER,
{ATTR_ENTITY_ID: COVER_RELAYS},
blocking=True,
)
state = hass.states.get(COVER_RELAYS)
state.state = CoverState.OPEN

# command failed
control_motor_relays.return_value = False
@@ -312,12 +298,8 @@ async def test_relays_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None:
with patch.object(
MockModuleConnection, "control_motor_relays"
) as control_motor_relays:
await hass.services.async_call(
DOMAIN_COVER,
SERVICE_CLOSE_COVER,
{ATTR_ENTITY_ID: COVER_RELAYS},
blocking=True,
)
state = hass.states.get(COVER_RELAYS)
state.state = CoverState.CLOSING

# command failed
control_motor_relays.return_value = False
@@ -378,8 +360,7 @@ async def test_relays_set_position(
MockModuleConnection, "control_motor_relays_position"
) as control_motor_relays_position:
state = hass.states.get(entity_id)
assert state is not None
assert state.state == CoverState.CLOSED
state.state = CoverState.CLOSED

# command failed
control_motor_relays_position.return_value = False

@@ -209,12 +209,8 @@ async def test_relay_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> No
states = [RelayStateModifier.NOCHANGE] * 8
states[0] = RelayStateModifier.OFF

await hass.services.async_call(
DOMAIN_LIGHT,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: LIGHT_RELAY1},
blocking=True,
)
state = hass.states.get(LIGHT_RELAY1)
state.state = STATE_ON

# command failed
control_relays.return_value = False

@@ -93,12 +93,8 @@ async def test_output_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> N
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "dim_output") as dim_output:
await hass.services.async_call(
DOMAIN_SWITCH,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: SWITCH_OUTPUT1},
blocking=True,
)
state = hass.states.get(SWITCH_OUTPUT1)
state.state = STATE_ON

# command failed
dim_output.return_value = False
@@ -180,12 +176,8 @@ async def test_relay_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> No
states = [RelayStateModifier.NOCHANGE] * 8
states[0] = RelayStateModifier.OFF

await hass.services.async_call(
DOMAIN_SWITCH,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: SWITCH_RELAY1},
blocking=True,
)
state = hass.states.get(SWITCH_RELAY1)
state.state = STATE_ON

# command failed
control_relays.return_value = False
@@ -265,12 +257,8 @@ async def test_regulatorlock_turn_off(
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator:
await hass.services.async_call(
DOMAIN_SWITCH,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: SWITCH_REGULATOR1},
blocking=True,
)
state = hass.states.get(SWITCH_REGULATOR1)
state.state = STATE_ON

# command failed
lock_regulator.return_value = False
@@ -352,12 +340,8 @@ async def test_keylock_turn_off(hass: HomeAssistant, entry: MockConfigEntry) ->
states = [KeyLockStateModifier.NOCHANGE] * 8
states[0] = KeyLockStateModifier.OFF

await hass.services.async_call(
DOMAIN_SWITCH,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: SWITCH_KEYLOCKK1},
blocking=True,
)
state = hass.states.get(SWITCH_KEYLOCKK1)
state.state = STATE_ON

# command failed
lock_keys.return_value = False

@@ -600,6 +600,23 @@ MOCK_SUBENTRY_SWITCH_COMPONENT = {
"optimistic": True,
},
}
MOCK_SUBENTRY_TEXT_COMPONENT = {
"09261f6feed443e7b7d5f3fbe2a47413": {
"platform": "text",
"name": "MOTD",
"entity_category": None,
"command_topic": "test-topic",
"command_template": "{{ value }}",
"state_topic": "test-topic",
"min": 0.0,
"max": 10.0,
"mode": "password",
"pattern": "^[a-z_]*$",
"value_template": "{{ value_json.value }}",
"retain": False,
"entity_picture": "https://example.com/09261f6feed443e7b7d5f3fbe2a47413",
},
}

MOCK_SUBENTRY_AVAILABILITY_DATA = {
"availability": {
@@ -725,6 +742,10 @@ MOCK_SWITCH_SUBENTRY_DATA = {
"device": MOCK_SUBENTRY_DEVICE_DATA | {"mqtt_settings": {"qos": 0}},
"components": MOCK_SUBENTRY_SWITCH_COMPONENT,
}
MOCK_TEXT_SUBENTRY_DATA = {
"device": MOCK_SUBENTRY_DEVICE_DATA | {"mqtt_settings": {"qos": 0}},
"components": MOCK_SUBENTRY_TEXT_COMPONENT,
}
MOCK_SUBENTRY_DATA_BAD_COMPONENT_SCHEMA = {
"device": MOCK_SUBENTRY_DEVICE_DATA | {"mqtt_settings": {"qos": 0}},
"components": MOCK_SUBENTRY_NOTIFY_BAD_SCHEMA,

@@ -62,6 +62,7 @@ from .common import (
MOCK_SENSOR_SUBENTRY_DATA_STATE_CLASS,
MOCK_SIREN_SUBENTRY_DATA,
MOCK_SWITCH_SUBENTRY_DATA,
MOCK_TEXT_SUBENTRY_DATA,
)

from tests.common import MockConfigEntry, MockMqttReasonCode, get_schema_suggested_value
@@ -3720,6 +3721,65 @@ async def test_migrate_of_incompatible_config_entry(
"Milk notifier Outlet",
id="switch",
),
pytest.param(
MOCK_TEXT_SUBENTRY_DATA,
{"name": "Milk notifier", "mqtt_settings": {"qos": 0}},
{"name": "MOTD"},
{},
(),
{
"command_topic": "test-topic",
"command_template": "{{ value }}",
"state_topic": "test-topic",
"value_template": "{{ value_json.value }}",
"retain": False,
"text_advanced_settings": {
"min": 0,
"max": 10,
"mode": "password",
"pattern": "^[a-z_]*$",
},
},
(
(
{"command_topic": "test-topic#invalid"},
{"command_topic": "invalid_publish_topic"},
),
(
{
"command_topic": "test-topic",
"state_topic": "test-topic#invalid",
},
{"state_topic": "invalid_subscribe_topic"},
),
(
{
"command_topic": "test-topic",
"text_advanced_settings": {
"min": 20,
"max": 10,
"mode": "password",
"pattern": "^[a-z_]*$",
},
},
{"text_advanced_settings": "max_below_min"},
),
(
{
"command_topic": "test-topic",
"text_advanced_settings": {
"min": 0,
"max": 10,
"mode": "password",
"pattern": "(",
},
},
{"text_advanced_settings": "invalid_regular_expression"},
),
),
"Milk notifier MOTD",
id="text",
),
],
)
async def test_subentry_configflow(
@@ -3770,10 +3830,6 @@ async def test_subentry_configflow(
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "entity"
assert result["errors"] == {}
assert "description_placeholders" in result
for placeholder, translation in TRANSLATION_DESCRIPTION_PLACEHOLDERS.items():
assert placeholder in result["description_placeholders"]
assert result["description_placeholders"][placeholder] == translation

# Process entity flow (initial step)

@@ -141,7 +141,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'select',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_category': None,
'entity_id': 'select.bathroom_thermostat_schedule',
'has_entity_name': True,
'hidden_by': None,
@@ -263,7 +263,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'select',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_category': None,
'entity_id': 'select.living_room_thermostat_schedule',
'has_entity_name': True,
'hidden_by': None,
@@ -386,7 +386,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'select',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_category': None,
'entity_id': 'select.badkamer_thermostat_schedule',
'has_entity_name': True,
'hidden_by': None,
@@ -451,7 +451,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'select',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_category': None,
'entity_id': 'select.bios_thermostat_schedule',
'has_entity_name': True,
'hidden_by': None,
@@ -516,7 +516,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'select',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_category': None,
'entity_id': 'select.jessie_thermostat_schedule',
'has_entity_name': True,
'hidden_by': None,
@@ -581,7 +581,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'select',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_category': None,
'entity_id': 'select.woonkamer_thermostat_schedule',
'has_entity_name': True,
'hidden_by': None,

@@ -13,7 +13,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_category': None,
'entity_id': 'sensor.adam_outdoor_temperature',
'has_entity_name': True,
'hidden_by': None,
@@ -69,7 +69,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.anna_setpoint',
'has_entity_name': True,
'hidden_by': None,
@@ -125,7 +125,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.anna_temperature',
'has_entity_name': True,
'hidden_by': None,
@@ -293,7 +293,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.bathroom_temperature',
'has_entity_name': True,
'hidden_by': None,
@@ -455,7 +455,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.emma_setpoint',
'has_entity_name': True,
'hidden_by': None,
@@ -511,7 +511,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.emma_temperature',
'has_entity_name': True,
'hidden_by': None,
@@ -673,7 +673,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.jip_setpoint',
'has_entity_name': True,
'hidden_by': None,
@@ -729,7 +729,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.jip_temperature',
'has_entity_name': True,
'hidden_by': None,
@@ -838,7 +838,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.lisa_badkamer_setpoint',
'has_entity_name': True,
'hidden_by': None,
@@ -894,7 +894,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.lisa_badkamer_temperature',
'has_entity_name': True,
'hidden_by': None,
@@ -1062,7 +1062,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.living_room_temperature',
'has_entity_name': True,
'hidden_by': None,
@@ -1283,7 +1283,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.tom_badkamer_setpoint',
'has_entity_name': True,
'hidden_by': None,
@@ -1339,7 +1339,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.tom_badkamer_temperature',
'has_entity_name': True,
'hidden_by': None,
@@ -1556,7 +1556,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.anna_setpoint',
'has_entity_name': True,
'hidden_by': None,
@@ -1612,7 +1612,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.anna_temperature',
'has_entity_name': True,
'hidden_by': None,
@@ -2948,7 +2948,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_category': None,
'entity_id': 'sensor.smile_anna_p1_outdoor_temperature',
'has_entity_name': True,
'hidden_by': None,
@@ -3004,7 +3004,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.anna_cooling_setpoint',
'has_entity_name': True,
'hidden_by': None,
@@ -3060,7 +3060,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.anna_heating_setpoint',
'has_entity_name': True,
'hidden_by': None,
@@ -3169,7 +3169,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.anna_temperature',
'has_entity_name': True,
'hidden_by': None,
@@ -3613,7 +3613,7 @@
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_category': None,
'entity_id': 'sensor.smile_anna_outdoor_temperature',
'has_entity_name': True,
'hidden_by': None,

@@ -6,7 +6,6 @@ from unittest.mock import patch

import aiohttp
import pytest
from yarl import URL

from homeassistant.components.rest_command import DOMAIN
from homeassistant.const import (
@@ -456,34 +455,3 @@ async def test_rest_command_response_iter_chunked(

# Verify iter_chunked was called with a chunk size
assert mock_iter_chunked.called


async def test_rest_command_skip_url_encoding(
hass: HomeAssistant,
setup_component: ComponentSetup,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Check URL encoding."""
config = {
"skip_url_encoding_test": {
"url": "0%2C",
"method": "get",
"skip_url_encoding": True,
},
"with_url_encoding_test": {
"url": "1,",
"method": "get",
},
}

await setup_component(config)

aioclient_mock.get(URL("0%2C", encoded=True), content=b"success")
aioclient_mock.get(URL("1,"), content=b"success")

await hass.services.async_call(DOMAIN, "skip_url_encoding_test", {}, blocking=True)
await hass.services.async_call(DOMAIN, "with_url_encoding_test", {}, blocking=True)

assert len(aioclient_mock.mock_calls) == 2
assert str(aioclient_mock.mock_calls[0][1]) == "0%2C"
assert str(aioclient_mock.mock_calls[1][1]) == "1,"

@@ -247,7 +247,6 @@ MOCK_CONFIG = {
"wifi": {"sta": {"enable": True}, "sta1": {"enable": False}},
"ws": {"enable": False, "server": None},
"voltmeter:100": {"xvoltage": {"unit": "ppm"}},
"smoke:0": {"id": 0, "name": "test channel name"},
"script:1": {"id": 1, "name": "test_script.js", "enable": True},
"script:2": {"id": 2, "name": "test_script_2.js", "enable": False},
"script:3": {"id": 3, "name": BLE_SCRIPT_NAME, "enable": False},
@@ -440,7 +439,6 @@ MOCK_STATUS_RPC = {
"current_C": 12.3,
"output": True,
},
"smoke:0": {"id": 0, "alarm": False, "mute": False},
"script:1": {
"id": 1,
"running": True,

@@ -29,7 +29,7 @@
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'calibration',
'translation_key': None,
'unique_id': '123456789ABC-blutrv:200-calibration',
'unit_of_measurement': None,
})
@@ -78,7 +78,7 @@
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'cable_unplugged_with_channel_name',
'translation_key': None,
'unique_id': '123456789ABC-flood:0-flood_cable_unplugged',
'unit_of_measurement': None,
})
@@ -127,7 +127,7 @@
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'flood_with_channel_name',
'translation_key': None,
'unique_id': '123456789ABC-flood:0-flood',
'unit_of_measurement': None,
})
@@ -176,7 +176,7 @@
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'mute_with_channel_name',
'translation_key': None,
'unique_id': '123456789ABC-flood:0-mute',
'unit_of_measurement': None,
})

Some files were not shown because too many files have changed in this diff