mirror of
https://github.com/home-assistant/core.git
synced 2025-09-19 18:09:48 +00:00
Compare commits
80 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
c2f6255d16 | ||
![]() |
e5fd08ae76 | ||
![]() |
4b5633d9d8 | ||
![]() |
a9c6a06704 | ||
![]() |
0faa8efd5a | ||
![]() |
5a257b090e | ||
![]() |
41fb6a537f | ||
![]() |
b166c32eb8 | ||
![]() |
288acfb511 | ||
![]() |
2cb9682303 | ||
![]() |
7e52170789 | ||
![]() |
979b3d4269 | ||
![]() |
9772014bce | ||
![]() |
f8763c49ef | ||
![]() |
b4ef00659c | ||
![]() |
df49c53bb6 | ||
![]() |
8dfe483b38 | ||
![]() |
b45d7cbbc3 | ||
![]() |
239ba9b1cc | ||
![]() |
2d5a75d4f2 | ||
![]() |
e1ad3f05e6 | ||
![]() |
b9280edbfa | ||
![]() |
010993fc5f | ||
![]() |
713931661e | ||
![]() |
af06521f66 | ||
![]() |
c32f57f85a | ||
![]() |
171061a778 | ||
![]() |
476ea35bdb | ||
![]() |
00e6866664 | ||
![]() |
201bf95ab8 | ||
![]() |
ff22bbd0e4 | ||
![]() |
fd8d4e937c | ||
![]() |
7903348d79 | ||
![]() |
090dbba06e | ||
![]() |
af77e69eb0 | ||
![]() |
23e7638687 | ||
![]() |
36b722960a | ||
![]() |
3dd241a398 | ||
![]() |
b5a9c3d1f6 | ||
![]() |
eca714a45a | ||
![]() |
8049699efb | ||
![]() |
7c6afd50dc | ||
![]() |
42d8889778 | ||
![]() |
a4c0304e1f | ||
![]() |
c63e688ba8 | ||
![]() |
16298b4195 | ||
![]() |
da23eb22db | ||
![]() |
4bd1d0199b | ||
![]() |
efe7050030 | ||
![]() |
79ff85f517 | ||
![]() |
73ad4caf94 | ||
![]() |
e3d649d349 | ||
![]() |
657e3488ba | ||
![]() |
7508c14a53 | ||
![]() |
ac84970da8 | ||
![]() |
30073f3493 | ||
![]() |
3abd7b8ba3 | ||
![]() |
62bc6e4bf6 | ||
![]() |
5faa189fef | ||
![]() |
e09ae1c83d | ||
![]() |
7b20299de7 | ||
![]() |
81e501aba1 | ||
![]() |
568ac22ce8 | ||
![]() |
c71ab054f1 | ||
![]() |
bea201f9f6 | ||
![]() |
dda90bc04c | ||
![]() |
a033e4c88d | ||
![]() |
42b6f83e7c | ||
![]() |
cb937bc115 | ||
![]() |
bec569caf9 | ||
![]() |
3390fb32a8 | ||
![]() |
3ebb58f780 | ||
![]() |
30b131d3b9 | ||
![]() |
cd40232beb | ||
![]() |
f27fe365c5 | ||
![]() |
1c769418fb | ||
![]() |
db7c2dab52 | ||
![]() |
627377872b | ||
![]() |
8504162539 | ||
![]() |
67c6a1d436 |
@@ -19,10 +19,20 @@ class ApSystemsEntity(Entity):
|
||||
data: ApSystemsData,
|
||||
) -> None:
|
||||
"""Initialize the APsystems entity."""
|
||||
|
||||
# Handle device version safely
|
||||
sw_version = None
|
||||
if data.coordinator.device_version:
|
||||
version_parts = data.coordinator.device_version.split(" ")
|
||||
if len(version_parts) > 1:
|
||||
sw_version = version_parts[1]
|
||||
else:
|
||||
sw_version = version_parts[0]
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, data.device_id)},
|
||||
manufacturer="APsystems",
|
||||
model="EZ1-M",
|
||||
serial_number=data.device_id,
|
||||
sw_version=data.coordinator.device_version.split(" ")[1],
|
||||
sw_version=sw_version,
|
||||
)
|
||||
|
@@ -1,5 +1,7 @@
|
||||
"""Assist Satellite intents."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -7,6 +9,8 @@ from homeassistant.helpers import entity_registry as er, intent
|
||||
|
||||
from .const import DOMAIN, AssistSatelliteEntityFeature
|
||||
|
||||
EXCLUDED_DOMAINS: Final[set[str]] = {"voip"}
|
||||
|
||||
|
||||
async def async_setup_intents(hass: HomeAssistant) -> None:
|
||||
"""Set up the intents."""
|
||||
@@ -30,19 +34,36 @@ class BroadcastIntentHandler(intent.IntentHandler):
|
||||
ent_reg = er.async_get(hass)
|
||||
|
||||
# Find all assist satellite entities that are not the one invoking the intent
|
||||
entities = {
|
||||
entity: entry
|
||||
for entity in hass.states.async_entity_ids(DOMAIN)
|
||||
if (entry := ent_reg.async_get(entity))
|
||||
and entry.supported_features & AssistSatelliteEntityFeature.ANNOUNCE
|
||||
}
|
||||
entities: dict[str, er.RegistryEntry] = {}
|
||||
for entity in hass.states.async_entity_ids(DOMAIN):
|
||||
entry = ent_reg.async_get(entity)
|
||||
if (
|
||||
(entry is None)
|
||||
or (
|
||||
# Supports announce
|
||||
not (
|
||||
entry.supported_features & AssistSatelliteEntityFeature.ANNOUNCE
|
||||
)
|
||||
)
|
||||
# Not the invoking device
|
||||
or (intent_obj.device_id and (entry.device_id == intent_obj.device_id))
|
||||
):
|
||||
# Skip satellite
|
||||
continue
|
||||
|
||||
if intent_obj.device_id:
|
||||
entities = {
|
||||
entity: entry
|
||||
for entity, entry in entities.items()
|
||||
if entry.device_id != intent_obj.device_id
|
||||
}
|
||||
# Check domain of config entry against excluded domains
|
||||
if (
|
||||
entry.config_entry_id
|
||||
and (
|
||||
config_entry := hass.config_entries.async_get_entry(
|
||||
entry.config_entry_id
|
||||
)
|
||||
)
|
||||
and (config_entry.domain in EXCLUDED_DOMAINS)
|
||||
):
|
||||
continue
|
||||
|
||||
entities[entity] = entry
|
||||
|
||||
await hass.services.async_call(
|
||||
DOMAIN,
|
||||
@@ -54,7 +75,6 @@ class BroadcastIntentHandler(intent.IntentHandler):
|
||||
)
|
||||
|
||||
response = intent_obj.create_response()
|
||||
response.async_set_speech("Done")
|
||||
response.response_type = intent.IntentResponseType.ACTION_DONE
|
||||
response.async_set_results(
|
||||
success_results=[
|
||||
|
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import abc
|
||||
import asyncio
|
||||
from collections import defaultdict
|
||||
from collections.abc import AsyncIterator, Callable, Coroutine
|
||||
from dataclasses import dataclass, replace
|
||||
from enum import StrEnum
|
||||
@@ -560,8 +561,15 @@ class BackupManager:
|
||||
return_exceptions=True,
|
||||
)
|
||||
for idx, result in enumerate(list_backups_results):
|
||||
agent_id = agent_ids[idx]
|
||||
if isinstance(result, BackupAgentError):
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
agent_errors[agent_id] = result
|
||||
continue
|
||||
if isinstance(result, Exception):
|
||||
agent_errors[agent_id] = result
|
||||
LOGGER.error(
|
||||
"Unexpected error for %s: %s", agent_id, result, exc_info=result
|
||||
)
|
||||
continue
|
||||
if isinstance(result, BaseException):
|
||||
raise result # unexpected error
|
||||
@@ -588,7 +596,7 @@ class BackupManager:
|
||||
name=agent_backup.name,
|
||||
with_automatic_settings=with_automatic_settings,
|
||||
)
|
||||
backups[backup_id].agents[agent_ids[idx]] = AgentBackupStatus(
|
||||
backups[backup_id].agents[agent_id] = AgentBackupStatus(
|
||||
protected=agent_backup.protected,
|
||||
size=agent_backup.size,
|
||||
)
|
||||
@@ -611,8 +619,15 @@ class BackupManager:
|
||||
return_exceptions=True,
|
||||
)
|
||||
for idx, result in enumerate(get_backup_results):
|
||||
agent_id = agent_ids[idx]
|
||||
if isinstance(result, BackupAgentError):
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
agent_errors[agent_id] = result
|
||||
continue
|
||||
if isinstance(result, Exception):
|
||||
agent_errors[agent_id] = result
|
||||
LOGGER.error(
|
||||
"Unexpected error for %s: %s", agent_id, result, exc_info=result
|
||||
)
|
||||
continue
|
||||
if isinstance(result, BaseException):
|
||||
raise result # unexpected error
|
||||
@@ -640,7 +655,7 @@ class BackupManager:
|
||||
name=result.name,
|
||||
with_automatic_settings=with_automatic_settings,
|
||||
)
|
||||
backup.agents[agent_ids[idx]] = AgentBackupStatus(
|
||||
backup.agents[agent_id] = AgentBackupStatus(
|
||||
protected=result.protected,
|
||||
size=result.size,
|
||||
)
|
||||
@@ -663,21 +678,31 @@ class BackupManager:
|
||||
return None
|
||||
return with_automatic_settings
|
||||
|
||||
async def async_delete_backup(self, backup_id: str) -> dict[str, Exception]:
|
||||
async def async_delete_backup(
|
||||
self, backup_id: str, *, agent_ids: list[str] | None = None
|
||||
) -> dict[str, Exception]:
|
||||
"""Delete a backup."""
|
||||
agent_errors: dict[str, Exception] = {}
|
||||
agent_ids = list(self.backup_agents)
|
||||
if agent_ids is None:
|
||||
agent_ids = list(self.backup_agents)
|
||||
|
||||
delete_backup_results = await asyncio.gather(
|
||||
*(
|
||||
agent.async_delete_backup(backup_id)
|
||||
for agent in self.backup_agents.values()
|
||||
self.backup_agents[agent_id].async_delete_backup(backup_id)
|
||||
for agent_id in agent_ids
|
||||
),
|
||||
return_exceptions=True,
|
||||
)
|
||||
for idx, result in enumerate(delete_backup_results):
|
||||
agent_id = agent_ids[idx]
|
||||
if isinstance(result, BackupAgentError):
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
agent_errors[agent_id] = result
|
||||
continue
|
||||
if isinstance(result, Exception):
|
||||
agent_errors[agent_id] = result
|
||||
LOGGER.error(
|
||||
"Unexpected error for %s: %s", agent_id, result, exc_info=result
|
||||
)
|
||||
continue
|
||||
if isinstance(result, BaseException):
|
||||
raise result # unexpected error
|
||||
@@ -710,35 +735,71 @@ class BackupManager:
|
||||
# Run the include filter first to ensure we only consider backups that
|
||||
# should be included in the deletion process.
|
||||
backups = include_filter(backups)
|
||||
backups_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(dict)
|
||||
for backup_id, backup in backups.items():
|
||||
for agent_id in backup.agents:
|
||||
backups_by_agent[agent_id][backup_id] = backup
|
||||
|
||||
LOGGER.debug("Total automatic backups: %s", backups)
|
||||
LOGGER.debug("Backups returned by include filter: %s", backups)
|
||||
LOGGER.debug(
|
||||
"Backups returned by include filter by agent: %s",
|
||||
{agent_id: list(backups) for agent_id, backups in backups_by_agent.items()},
|
||||
)
|
||||
|
||||
backups_to_delete = delete_filter(backups)
|
||||
|
||||
LOGGER.debug("Backups returned by delete filter: %s", backups_to_delete)
|
||||
|
||||
if not backups_to_delete:
|
||||
return
|
||||
|
||||
# always delete oldest backup first
|
||||
backups_to_delete = dict(
|
||||
sorted(
|
||||
backups_to_delete.items(),
|
||||
key=lambda backup_item: backup_item[1].date,
|
||||
)
|
||||
backups_to_delete_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(
|
||||
dict
|
||||
)
|
||||
for backup_id, backup in sorted(
|
||||
backups_to_delete.items(),
|
||||
key=lambda backup_item: backup_item[1].date,
|
||||
):
|
||||
for agent_id in backup.agents:
|
||||
backups_to_delete_by_agent[agent_id][backup_id] = backup
|
||||
LOGGER.debug(
|
||||
"Backups returned by delete filter by agent: %s",
|
||||
{
|
||||
agent_id: list(backups)
|
||||
for agent_id, backups in backups_to_delete_by_agent.items()
|
||||
},
|
||||
)
|
||||
for agent_id, to_delete_from_agent in backups_to_delete_by_agent.items():
|
||||
if len(to_delete_from_agent) >= len(backups_by_agent[agent_id]):
|
||||
# Never delete the last backup.
|
||||
last_backup = to_delete_from_agent.popitem()
|
||||
LOGGER.debug(
|
||||
"Keeping the last backup %s for agent %s", last_backup, agent_id
|
||||
)
|
||||
|
||||
LOGGER.debug(
|
||||
"Backups to delete by agent: %s",
|
||||
{
|
||||
agent_id: list(backups)
|
||||
for agent_id, backups in backups_to_delete_by_agent.items()
|
||||
},
|
||||
)
|
||||
|
||||
if len(backups_to_delete) >= len(backups):
|
||||
# Never delete the last backup.
|
||||
last_backup = backups_to_delete.popitem()
|
||||
LOGGER.debug("Keeping the last backup: %s", last_backup)
|
||||
backup_ids_to_delete: dict[str, set[str]] = defaultdict(set)
|
||||
for agent_id, to_delete in backups_to_delete_by_agent.items():
|
||||
for backup_id in to_delete:
|
||||
backup_ids_to_delete[backup_id].add(agent_id)
|
||||
|
||||
LOGGER.debug("Backups to delete: %s", backups_to_delete)
|
||||
|
||||
if not backups_to_delete:
|
||||
if not backup_ids_to_delete:
|
||||
return
|
||||
|
||||
backup_ids = list(backups_to_delete)
|
||||
backup_ids = list(backup_ids_to_delete)
|
||||
delete_results = await asyncio.gather(
|
||||
*(self.async_delete_backup(backup_id) for backup_id in backups_to_delete)
|
||||
*(
|
||||
self.async_delete_backup(backup_id, agent_ids=list(agent_ids))
|
||||
for backup_id, agent_ids in backup_ids_to_delete.items()
|
||||
)
|
||||
)
|
||||
agent_errors = {
|
||||
backup_id: error
|
||||
|
@@ -411,7 +411,7 @@ def ble_device_matches(
|
||||
) and service_data_uuid not in service_info.service_data:
|
||||
return False
|
||||
|
||||
if manufacturer_id := matcher.get(MANUFACTURER_ID):
|
||||
if (manufacturer_id := matcher.get(MANUFACTURER_ID)) is not None:
|
||||
if manufacturer_id not in service_info.manufacturer_data:
|
||||
return False
|
||||
|
||||
|
@@ -3,21 +3,16 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import base64
|
||||
from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
|
||||
import hashlib
|
||||
import logging
|
||||
import random
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientError, ClientTimeout
|
||||
from aiohttp import ClientError
|
||||
from hass_nabucasa import Cloud, CloudError
|
||||
from hass_nabucasa.cloud_api import (
|
||||
async_files_delete_file,
|
||||
async_files_download_details,
|
||||
async_files_list,
|
||||
async_files_upload_details,
|
||||
)
|
||||
from hass_nabucasa.api import CloudApiNonRetryableError
|
||||
from hass_nabucasa.cloud_api import async_files_delete_file, async_files_list
|
||||
from hass_nabucasa.files import FilesError, StorageType, calculate_b64md5
|
||||
|
||||
from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -28,20 +23,11 @@ from .client import CloudClient
|
||||
from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_STORAGE_BACKUP = "backup"
|
||||
_RETRY_LIMIT = 5
|
||||
_RETRY_SECONDS_MIN = 60
|
||||
_RETRY_SECONDS_MAX = 600
|
||||
|
||||
|
||||
async def _b64md5(stream: AsyncIterator[bytes]) -> str:
|
||||
"""Calculate the MD5 hash of a file."""
|
||||
file_hash = hashlib.md5()
|
||||
async for chunk in stream:
|
||||
file_hash.update(chunk)
|
||||
return base64.b64encode(file_hash.digest()).decode()
|
||||
|
||||
|
||||
async def async_get_backup_agents(
|
||||
hass: HomeAssistant,
|
||||
**kwargs: Any,
|
||||
@@ -109,63 +95,14 @@ class CloudBackupAgent(BackupAgent):
|
||||
raise BackupAgentError("Backup not found")
|
||||
|
||||
try:
|
||||
details = await async_files_download_details(
|
||||
self._cloud,
|
||||
storage_type=_STORAGE_BACKUP,
|
||||
content = await self._cloud.files.download(
|
||||
storage_type=StorageType.BACKUP,
|
||||
filename=self._get_backup_filename(),
|
||||
)
|
||||
except (ClientError, CloudError) as err:
|
||||
raise BackupAgentError("Failed to get download details") from err
|
||||
except CloudError as err:
|
||||
raise BackupAgentError(f"Failed to download backup: {err}") from err
|
||||
|
||||
try:
|
||||
resp = await self._cloud.websession.get(
|
||||
details["url"],
|
||||
timeout=ClientTimeout(connect=10.0, total=43200.0), # 43200s == 12h
|
||||
)
|
||||
|
||||
resp.raise_for_status()
|
||||
except ClientError as err:
|
||||
raise BackupAgentError("Failed to download backup") from err
|
||||
|
||||
return ChunkAsyncStreamIterator(resp.content)
|
||||
|
||||
async def _async_do_upload_backup(
|
||||
self,
|
||||
*,
|
||||
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
|
||||
filename: str,
|
||||
base64md5hash: str,
|
||||
metadata: dict[str, Any],
|
||||
size: int,
|
||||
) -> None:
|
||||
"""Upload a backup."""
|
||||
try:
|
||||
details = await async_files_upload_details(
|
||||
self._cloud,
|
||||
storage_type=_STORAGE_BACKUP,
|
||||
filename=filename,
|
||||
metadata=metadata,
|
||||
size=size,
|
||||
base64md5hash=base64md5hash,
|
||||
)
|
||||
except (ClientError, CloudError) as err:
|
||||
raise BackupAgentError("Failed to get upload details") from err
|
||||
|
||||
try:
|
||||
upload_status = await self._cloud.websession.put(
|
||||
details["url"],
|
||||
data=await open_stream(),
|
||||
headers=details["headers"] | {"content-length": str(size)},
|
||||
timeout=ClientTimeout(connect=10.0, total=43200.0), # 43200s == 12h
|
||||
)
|
||||
_LOGGER.log(
|
||||
logging.DEBUG if upload_status.status < 400 else logging.WARNING,
|
||||
"Backup upload status: %s",
|
||||
upload_status.status,
|
||||
)
|
||||
upload_status.raise_for_status()
|
||||
except (TimeoutError, ClientError) as err:
|
||||
raise BackupAgentError("Failed to upload backup") from err
|
||||
return ChunkAsyncStreamIterator(content)
|
||||
|
||||
async def async_upload_backup(
|
||||
self,
|
||||
@@ -182,15 +119,19 @@ class CloudBackupAgent(BackupAgent):
|
||||
if not backup.protected:
|
||||
raise BackupAgentError("Cloud backups must be protected")
|
||||
|
||||
base64md5hash = await _b64md5(await open_stream())
|
||||
size = backup.size
|
||||
try:
|
||||
base64md5hash = await calculate_b64md5(open_stream, size)
|
||||
except FilesError as err:
|
||||
raise BackupAgentError(err) from err
|
||||
filename = self._get_backup_filename()
|
||||
metadata = backup.as_dict()
|
||||
size = backup.size
|
||||
|
||||
tries = 1
|
||||
while tries <= _RETRY_LIMIT:
|
||||
try:
|
||||
await self._async_do_upload_backup(
|
||||
await self._cloud.files.upload(
|
||||
storage_type=StorageType.BACKUP,
|
||||
open_stream=open_stream,
|
||||
filename=filename,
|
||||
base64md5hash=base64md5hash,
|
||||
@@ -198,9 +139,19 @@ class CloudBackupAgent(BackupAgent):
|
||||
size=size,
|
||||
)
|
||||
break
|
||||
except BackupAgentError as err:
|
||||
except CloudApiNonRetryableError as err:
|
||||
if err.code == "NC-SH-FH-03":
|
||||
raise BackupAgentError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="backup_size_too_large",
|
||||
translation_placeholders={
|
||||
"size": str(round(size / (1024**3), 2))
|
||||
},
|
||||
) from err
|
||||
raise BackupAgentError(f"Failed to upload backup {err}") from err
|
||||
except CloudError as err:
|
||||
if tries == _RETRY_LIMIT:
|
||||
raise
|
||||
raise BackupAgentError(f"Failed to upload backup {err}") from err
|
||||
tries += 1
|
||||
retry_timer = random.randint(_RETRY_SECONDS_MIN, _RETRY_SECONDS_MAX)
|
||||
_LOGGER.info(
|
||||
@@ -227,7 +178,7 @@ class CloudBackupAgent(BackupAgent):
|
||||
try:
|
||||
await async_files_delete_file(
|
||||
self._cloud,
|
||||
storage_type=_STORAGE_BACKUP,
|
||||
storage_type=StorageType.BACKUP,
|
||||
filename=self._get_backup_filename(),
|
||||
)
|
||||
except (ClientError, CloudError) as err:
|
||||
@@ -236,7 +187,9 @@ class CloudBackupAgent(BackupAgent):
|
||||
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
|
||||
"""List backups."""
|
||||
try:
|
||||
backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP)
|
||||
backups = await async_files_list(
|
||||
self._cloud, storage_type=StorageType.BACKUP
|
||||
)
|
||||
_LOGGER.debug("Cloud backups: %s", backups)
|
||||
except (ClientError, CloudError) as err:
|
||||
raise BackupAgentError("Failed to list backups") from err
|
||||
|
@@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["hass_nabucasa"],
|
||||
"requirements": ["hass-nabucasa==0.88.1"],
|
||||
"requirements": ["hass-nabucasa==0.90.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
@@ -17,6 +17,11 @@
|
||||
"subscription_expiration": "Subscription expiration"
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"backup_size_too_large": {
|
||||
"message": "The backup size of {size}GB is too large to be uploaded to Home Assistant Cloud."
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_gender": {
|
||||
"title": "The {deprecated_option} text-to-speech option is deprecated",
|
||||
|
@@ -140,8 +140,10 @@ def get_accounts(client, version):
|
||||
API_ACCOUNT_ID: account[API_V3_ACCOUNT_ID],
|
||||
API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
|
||||
API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY],
|
||||
API_ACCOUNT_AMOUNT: account[API_ACCOUNT_AVALIABLE][API_ACCOUNT_VALUE]
|
||||
+ account[API_ACCOUNT_HOLD][API_ACCOUNT_VALUE],
|
||||
API_ACCOUNT_AMOUNT: (
|
||||
float(account[API_ACCOUNT_AVALIABLE][API_ACCOUNT_VALUE])
|
||||
+ float(account[API_ACCOUNT_HOLD][API_ACCOUNT_VALUE])
|
||||
),
|
||||
ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_V3_TYPE_VAULT,
|
||||
}
|
||||
for account in accounts
|
||||
|
@@ -44,9 +44,7 @@ class DiscovergyUpdateCoordinator(DataUpdateCoordinator[Reading]):
|
||||
)
|
||||
except InvalidLogin as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
f"Auth expired while fetching last reading for meter {self.meter.meter_id}"
|
||||
"Auth expired while fetching last reading"
|
||||
) from err
|
||||
except (HTTPError, DiscovergyClientError) as err:
|
||||
raise UpdateFailed(
|
||||
f"Error while fetching last reading for meter {self.meter.meter_id}"
|
||||
) from err
|
||||
raise UpdateFailed(f"Error while fetching last reading: {err}") from err
|
||||
|
@@ -23,7 +23,7 @@ from homeassistant.components.climate import (
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
|
||||
|
||||
from . import EconetConfigEntry
|
||||
from .const import DOMAIN
|
||||
@@ -35,8 +35,13 @@ ECONET_STATE_TO_HA = {
|
||||
ThermostatOperationMode.OFF: HVACMode.OFF,
|
||||
ThermostatOperationMode.AUTO: HVACMode.HEAT_COOL,
|
||||
ThermostatOperationMode.FAN_ONLY: HVACMode.FAN_ONLY,
|
||||
ThermostatOperationMode.EMERGENCY_HEAT: HVACMode.HEAT,
|
||||
}
|
||||
HA_STATE_TO_ECONET = {
|
||||
value: key
|
||||
for key, value in ECONET_STATE_TO_HA.items()
|
||||
if key != ThermostatOperationMode.EMERGENCY_HEAT
|
||||
}
|
||||
HA_STATE_TO_ECONET = {value: key for key, value in ECONET_STATE_TO_HA.items()}
|
||||
|
||||
ECONET_FAN_STATE_TO_HA = {
|
||||
ThermostatFanMode.AUTO: FAN_AUTO,
|
||||
@@ -209,7 +214,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
|
||||
|
||||
def turn_aux_heat_on(self) -> None:
|
||||
"""Turn auxiliary heater on."""
|
||||
async_create_issue(
|
||||
create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
"migrate_aux_heat",
|
||||
@@ -223,7 +228,7 @@ class EcoNetThermostat(EcoNetEntity[Thermostat], ClimateEntity):
|
||||
|
||||
def turn_aux_heat_off(self) -> None:
|
||||
"""Turn auxiliary heater off."""
|
||||
async_create_issue(
|
||||
create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
"migrate_aux_heat",
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
|
||||
"requirements": ["py-sucks==0.9.10", "deebot-client==12.0.0"]
|
||||
"requirements": ["py-sucks==0.9.10", "deebot-client==12.1.0"]
|
||||
}
|
||||
|
@@ -8,7 +8,7 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["eheimdigital"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["eheimdigital==1.0.5"],
|
||||
"requirements": ["eheimdigital==1.0.6"],
|
||||
"zeroconf": [
|
||||
{ "type": "_http._tcp.local.", "name": "eheimdigital._http._tcp.local." }
|
||||
]
|
||||
|
@@ -4,12 +4,16 @@ from __future__ import annotations
|
||||
|
||||
import aiohttp
|
||||
from electrickiwi_api import ElectricKiwiApi
|
||||
from electrickiwi_api.exceptions import ApiException
|
||||
from electrickiwi_api.exceptions import ApiException, AuthException
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
|
||||
from homeassistant.helpers import (
|
||||
aiohttp_client,
|
||||
config_entry_oauth2_flow,
|
||||
entity_registry as er,
|
||||
)
|
||||
|
||||
from . import api
|
||||
from .coordinator import (
|
||||
@@ -44,7 +48,9 @@ async def async_setup_entry(
|
||||
raise ConfigEntryNotReady from err
|
||||
|
||||
ek_api = ElectricKiwiApi(
|
||||
api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
|
||||
api.ConfigEntryElectricKiwiAuth(
|
||||
aiohttp_client.async_get_clientsession(hass), session
|
||||
)
|
||||
)
|
||||
hop_coordinator = ElectricKiwiHOPDataCoordinator(hass, entry, ek_api)
|
||||
account_coordinator = ElectricKiwiAccountDataCoordinator(hass, entry, ek_api)
|
||||
@@ -53,6 +59,8 @@ async def async_setup_entry(
|
||||
await ek_api.set_active_session()
|
||||
await hop_coordinator.async_config_entry_first_refresh()
|
||||
await account_coordinator.async_config_entry_first_refresh()
|
||||
except AuthException as err:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
except ApiException as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
|
||||
@@ -70,3 +78,53 @@ async def async_unload_entry(
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_migrate_entry(
|
||||
hass: HomeAssistant, config_entry: ElectricKiwiConfigEntry
|
||||
) -> bool:
|
||||
"""Migrate old entry."""
|
||||
if config_entry.version == 1 and config_entry.minor_version == 1:
|
||||
implementation = (
|
||||
await config_entry_oauth2_flow.async_get_config_entry_implementation(
|
||||
hass, config_entry
|
||||
)
|
||||
)
|
||||
|
||||
session = config_entry_oauth2_flow.OAuth2Session(
|
||||
hass, config_entry, implementation
|
||||
)
|
||||
|
||||
ek_api = ElectricKiwiApi(
|
||||
api.ConfigEntryElectricKiwiAuth(
|
||||
aiohttp_client.async_get_clientsession(hass), session
|
||||
)
|
||||
)
|
||||
try:
|
||||
await ek_api.set_active_session()
|
||||
connection_details = await ek_api.get_connection_details()
|
||||
except AuthException:
|
||||
config_entry.async_start_reauth(hass)
|
||||
return False
|
||||
except ApiException:
|
||||
return False
|
||||
unique_id = str(ek_api.customer_number)
|
||||
identifier = ek_api.electricity.identifier
|
||||
hass.config_entries.async_update_entry(
|
||||
config_entry, unique_id=unique_id, minor_version=2
|
||||
)
|
||||
entity_registry = er.async_get(hass)
|
||||
entity_entries = er.async_entries_for_config_entry(
|
||||
entity_registry, config_entry_id=config_entry.entry_id
|
||||
)
|
||||
|
||||
for entity in entity_entries:
|
||||
assert entity.config_entry_id
|
||||
entity_registry.async_update_entity(
|
||||
entity.entity_id,
|
||||
new_unique_id=entity.unique_id.replace(
|
||||
f"{unique_id}_{connection_details.id}", f"{unique_id}_{identifier}"
|
||||
),
|
||||
)
|
||||
|
||||
return True
|
||||
|
@@ -2,17 +2,16 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import cast
|
||||
|
||||
from aiohttp import ClientSession
|
||||
from electrickiwi_api import AbstractAuth
|
||||
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
|
||||
|
||||
from .const import API_BASE_URL
|
||||
|
||||
|
||||
class AsyncConfigEntryAuth(AbstractAuth):
|
||||
class ConfigEntryElectricKiwiAuth(AbstractAuth):
|
||||
"""Provide Electric Kiwi authentication tied to an OAuth2 based config entry."""
|
||||
|
||||
def __init__(
|
||||
@@ -29,4 +28,21 @@ class AsyncConfigEntryAuth(AbstractAuth):
|
||||
"""Return a valid access token."""
|
||||
await self._oauth_session.async_ensure_token_valid()
|
||||
|
||||
return cast(str, self._oauth_session.token["access_token"])
|
||||
return str(self._oauth_session.token["access_token"])
|
||||
|
||||
|
||||
class ConfigFlowElectricKiwiAuth(AbstractAuth):
|
||||
"""Provide Electric Kiwi authentication tied to an OAuth2 based config flow."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
token: str,
|
||||
) -> None:
|
||||
"""Initialize ConfigFlowFitbitApi."""
|
||||
super().__init__(aiohttp_client.async_get_clientsession(hass), API_BASE_URL)
|
||||
self._token = token
|
||||
|
||||
async def async_get_access_token(self) -> str:
|
||||
"""Return the token for the Electric Kiwi API."""
|
||||
return self._token
|
||||
|
@@ -6,9 +6,14 @@ from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.config_entries import ConfigFlowResult
|
||||
from electrickiwi_api import ElectricKiwiApi
|
||||
from electrickiwi_api.exceptions import ApiException
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
from . import api
|
||||
from .const import DOMAIN, SCOPE_VALUES
|
||||
|
||||
|
||||
@@ -17,6 +22,8 @@ class ElectricKiwiOauth2FlowHandler(
|
||||
):
|
||||
"""Config flow to handle Electric Kiwi OAuth2 authentication."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
DOMAIN = DOMAIN
|
||||
|
||||
@property
|
||||
@@ -40,12 +47,30 @@ class ElectricKiwiOauth2FlowHandler(
|
||||
) -> ConfigFlowResult:
|
||||
"""Dialog that informs the user that reauth is required."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(step_id="reauth_confirm")
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
description_placeholders={CONF_NAME: self._get_reauth_entry().title},
|
||||
)
|
||||
return await self.async_step_user()
|
||||
|
||||
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
|
||||
"""Create an entry for Electric Kiwi."""
|
||||
existing_entry = await self.async_set_unique_id(DOMAIN)
|
||||
if existing_entry:
|
||||
return self.async_update_reload_and_abort(existing_entry, data=data)
|
||||
return await super().async_oauth_create_entry(data)
|
||||
ek_api = ElectricKiwiApi(
|
||||
api.ConfigFlowElectricKiwiAuth(self.hass, data["token"]["access_token"])
|
||||
)
|
||||
|
||||
try:
|
||||
session = await ek_api.get_active_session()
|
||||
except ApiException:
|
||||
return self.async_abort(reason="connection_error")
|
||||
|
||||
unique_id = str(session.data.customer_number)
|
||||
await self.async_set_unique_id(unique_id)
|
||||
if self.source == SOURCE_REAUTH:
|
||||
self._abort_if_unique_id_mismatch(reason="wrong_account")
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), data=data
|
||||
)
|
||||
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(title=unique_id, data=data)
|
||||
|
@@ -8,4 +8,4 @@ OAUTH2_AUTHORIZE = "https://welcome.electrickiwi.co.nz/oauth/authorize"
|
||||
OAUTH2_TOKEN = "https://welcome.electrickiwi.co.nz/oauth/token"
|
||||
API_BASE_URL = "https://api.electrickiwi.co.nz"
|
||||
|
||||
SCOPE_VALUES = "read_connection_detail read_billing_frequency read_account_running_balance read_consumption_summary read_consumption_averages read_hop_intervals_config read_hop_connection save_hop_connection read_session"
|
||||
SCOPE_VALUES = "read_customer_details read_connection_detail read_connection read_billing_address get_bill_address read_billing_frequency read_billing_details read_billing_bills read_billing_bill read_billing_bill_id read_billing_bill_file read_account_running_balance read_customer_account_summary read_consumption_summary download_consumption_file read_consumption_averages get_consumption_averages read_hop_intervals_config read_hop_intervals read_hop_connection read_hop_specific_connection save_hop_connection save_hop_specific_connection read_outage_contact get_outage_contact_info_for_icp read_session read_session_data_login"
|
||||
|
@@ -10,7 +10,7 @@ import logging
|
||||
|
||||
from electrickiwi_api import ElectricKiwiApi
|
||||
from electrickiwi_api.exceptions import ApiException, AuthException
|
||||
from electrickiwi_api.model import AccountBalance, Hop, HopIntervals
|
||||
from electrickiwi_api.model import AccountSummary, Hop, HopIntervals
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -34,7 +34,7 @@ class ElectricKiwiRuntimeData:
|
||||
type ElectricKiwiConfigEntry = ConfigEntry[ElectricKiwiRuntimeData]
|
||||
|
||||
|
||||
class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountBalance]):
|
||||
class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountSummary]):
|
||||
"""ElectricKiwi Account Data object."""
|
||||
|
||||
def __init__(
|
||||
@@ -51,13 +51,13 @@ class ElectricKiwiAccountDataCoordinator(DataUpdateCoordinator[AccountBalance]):
|
||||
name="Electric Kiwi Account Data",
|
||||
update_interval=ACCOUNT_SCAN_INTERVAL,
|
||||
)
|
||||
self._ek_api = ek_api
|
||||
self.ek_api = ek_api
|
||||
|
||||
async def _async_update_data(self) -> AccountBalance:
|
||||
async def _async_update_data(self) -> AccountSummary:
|
||||
"""Fetch data from Account balance API endpoint."""
|
||||
try:
|
||||
async with asyncio.timeout(60):
|
||||
return await self._ek_api.get_account_balance()
|
||||
return await self.ek_api.get_account_summary()
|
||||
except AuthException as auth_err:
|
||||
raise ConfigEntryAuthFailed from auth_err
|
||||
except ApiException as api_err:
|
||||
@@ -85,7 +85,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
|
||||
# Polling interval. Will only be polled if there are subscribers.
|
||||
update_interval=HOP_SCAN_INTERVAL,
|
||||
)
|
||||
self._ek_api = ek_api
|
||||
self.ek_api = ek_api
|
||||
self.hop_intervals: HopIntervals | None = None
|
||||
|
||||
def get_hop_options(self) -> dict[str, int]:
|
||||
@@ -100,7 +100,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
|
||||
async def async_update_hop(self, hop_interval: int) -> Hop:
|
||||
"""Update selected hop and data."""
|
||||
try:
|
||||
self.async_set_updated_data(await self._ek_api.post_hop(hop_interval))
|
||||
self.async_set_updated_data(await self.ek_api.post_hop(hop_interval))
|
||||
except AuthException as auth_err:
|
||||
raise ConfigEntryAuthFailed from auth_err
|
||||
except ApiException as api_err:
|
||||
@@ -118,7 +118,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
|
||||
try:
|
||||
async with asyncio.timeout(60):
|
||||
if self.hop_intervals is None:
|
||||
hop_intervals: HopIntervals = await self._ek_api.get_hop_intervals()
|
||||
hop_intervals: HopIntervals = await self.ek_api.get_hop_intervals()
|
||||
hop_intervals.intervals = OrderedDict(
|
||||
filter(
|
||||
lambda pair: pair[1].active == 1,
|
||||
@@ -127,7 +127,7 @@ class ElectricKiwiHOPDataCoordinator(DataUpdateCoordinator[Hop]):
|
||||
)
|
||||
|
||||
self.hop_intervals = hop_intervals
|
||||
return await self._ek_api.get_hop()
|
||||
return await self.ek_api.get_hop()
|
||||
except AuthException as auth_err:
|
||||
raise ConfigEntryAuthFailed from auth_err
|
||||
except ApiException as api_err:
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/electric_kiwi",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["electrickiwi-api==0.8.5"]
|
||||
"requirements": ["electrickiwi-api==0.9.14"]
|
||||
}
|
||||
|
@@ -53,8 +53,8 @@ class ElectricKiwiSelectHOPEntity(
|
||||
"""Initialise the HOP selection entity."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = (
|
||||
f"{coordinator._ek_api.customer_number}" # noqa: SLF001
|
||||
f"_{coordinator._ek_api.connection_id}_{description.key}" # noqa: SLF001
|
||||
f"{coordinator.ek_api.customer_number}"
|
||||
f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
|
||||
)
|
||||
self.entity_description = description
|
||||
self.values_dict = coordinator.get_hop_options()
|
||||
|
@@ -6,7 +6,7 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from electrickiwi_api.model import AccountBalance, Hop
|
||||
from electrickiwi_api.model import AccountSummary, Hop
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
@@ -39,7 +39,15 @@ ATTR_HOP_PERCENTAGE = "hop_percentage"
|
||||
class ElectricKiwiAccountSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes Electric Kiwi sensor entity."""
|
||||
|
||||
value_func: Callable[[AccountBalance], float | datetime]
|
||||
value_func: Callable[[AccountSummary], float | datetime]
|
||||
|
||||
|
||||
def _get_hop_percentage(account_balance: AccountSummary) -> float:
|
||||
"""Return the hop percentage from account summary."""
|
||||
if power := account_balance.services.get("power"):
|
||||
if connection := power.connections[0]:
|
||||
return float(connection.hop_percentage)
|
||||
return 0.0
|
||||
|
||||
|
||||
ACCOUNT_SENSOR_TYPES: tuple[ElectricKiwiAccountSensorEntityDescription, ...] = (
|
||||
@@ -72,9 +80,7 @@ ACCOUNT_SENSOR_TYPES: tuple[ElectricKiwiAccountSensorEntityDescription, ...] = (
|
||||
translation_key="hop_power_savings",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_func=lambda account_balance: float(
|
||||
account_balance.connections[0].hop_percentage
|
||||
),
|
||||
value_func=_get_hop_percentage,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -165,8 +171,8 @@ class ElectricKiwiAccountEntity(
|
||||
super().__init__(coordinator)
|
||||
|
||||
self._attr_unique_id = (
|
||||
f"{coordinator._ek_api.customer_number}" # noqa: SLF001
|
||||
f"_{coordinator._ek_api.connection_id}_{description.key}" # noqa: SLF001
|
||||
f"{coordinator.ek_api.customer_number}"
|
||||
f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
|
||||
)
|
||||
self.entity_description = description
|
||||
|
||||
@@ -194,8 +200,8 @@ class ElectricKiwiHOPEntity(
|
||||
super().__init__(coordinator)
|
||||
|
||||
self._attr_unique_id = (
|
||||
f"{coordinator._ek_api.customer_number}" # noqa: SLF001
|
||||
f"_{coordinator._ek_api.connection_id}_{description.key}" # noqa: SLF001
|
||||
f"{coordinator.ek_api.customer_number}"
|
||||
f"_{coordinator.ek_api.electricity.identifier}_{description.key}"
|
||||
)
|
||||
self.entity_description = description
|
||||
|
||||
|
@@ -21,7 +21,8 @@
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
|
||||
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
|
||||
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]"
|
||||
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
|
||||
"connection_error": "[%key:common::config_flow::error::cannot_connect%]"
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "[%key:common::config_flow::create_entry::authenticated%]"
|
||||
|
@@ -6,7 +6,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/enphase_envoy",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyenphase"],
|
||||
"requirements": ["pyenphase==1.23.1"],
|
||||
"requirements": ["pyenphase==1.25.1"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"type": "_enphase-envoy._tcp.local."
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/fireservicerota",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pyfireservicerota"],
|
||||
"requirements": ["pyfireservicerota==0.0.43"]
|
||||
"requirements": ["pyfireservicerota==0.0.46"]
|
||||
}
|
||||
|
@@ -21,5 +21,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20250205.0"]
|
||||
"requirements": ["home-assistant-frontend==20250210.0"]
|
||||
}
|
||||
|
@@ -7,7 +7,7 @@ from collections.abc import Callable
|
||||
from google_drive_api.exceptions import GoogleDriveApiError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import instance_id
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
@@ -49,6 +49,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleDriveConfigEntry)
|
||||
except GoogleDriveApiError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
|
||||
_async_notify_backup_listeners_soon(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -56,10 +58,15 @@ async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: GoogleDriveConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
hass.loop.call_soon(_notify_backup_listeners, hass)
|
||||
_async_notify_backup_listeners_soon(hass)
|
||||
return True
|
||||
|
||||
|
||||
def _notify_backup_listeners(hass: HomeAssistant) -> None:
|
||||
def _async_notify_backup_listeners(hass: HomeAssistant) -> None:
|
||||
for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
|
||||
listener()
|
||||
|
||||
|
||||
@callback
|
||||
def _async_notify_backup_listeners_soon(hass: HomeAssistant) -> None:
|
||||
hass.loop.call_soon(_async_notify_backup_listeners, hass)
|
||||
|
@@ -146,9 +146,10 @@ class DriveClient:
|
||||
backup.backup_id,
|
||||
backup_metadata,
|
||||
)
|
||||
await self._api.upload_file(
|
||||
await self._api.resumable_upload_file(
|
||||
backup_metadata,
|
||||
open_stream,
|
||||
backup.size,
|
||||
timeout=ClientTimeout(total=_UPLOAD_AND_DOWNLOAD_TIMEOUT),
|
||||
)
|
||||
_LOGGER.debug(
|
||||
|
@@ -2,7 +2,10 @@
|
||||
|
||||
from homeassistant.components.application_credentials import AuthorizationServer
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
AUTH_CALLBACK_PATH,
|
||||
MY_AUTH_CALLBACK_PATH,
|
||||
)
|
||||
|
||||
|
||||
async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
|
||||
@@ -15,9 +18,14 @@ async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationSe
|
||||
|
||||
async def async_get_description_placeholders(hass: HomeAssistant) -> dict[str, str]:
|
||||
"""Return description placeholders for the credentials dialog."""
|
||||
if "my" in hass.config.components:
|
||||
redirect_url = MY_AUTH_CALLBACK_PATH
|
||||
else:
|
||||
ha_host = hass.config.external_url or "https://YOUR_DOMAIN:PORT"
|
||||
redirect_url = f"{ha_host}{AUTH_CALLBACK_PATH}"
|
||||
return {
|
||||
"oauth_consent_url": "https://console.cloud.google.com/apis/credentials/consent",
|
||||
"more_info_url": "https://www.home-assistant.io/integrations/google_drive/",
|
||||
"oauth_creds_url": "https://console.cloud.google.com/apis/credentials",
|
||||
"redirect_url": config_entry_oauth2_flow.async_get_redirect_uri(hass),
|
||||
"redirect_url": redirect_url,
|
||||
}
|
||||
|
@@ -10,5 +10,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["google_drive_api"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["python-google-drive-api==0.0.2"]
|
||||
"requirements": ["python-google-drive-api==0.1.0"]
|
||||
}
|
||||
|
@@ -38,6 +38,10 @@
|
||||
"local_name": "GV5126*",
|
||||
"connectable": false
|
||||
},
|
||||
{
|
||||
"local_name": "GV5179*",
|
||||
"connectable": false
|
||||
},
|
||||
{
|
||||
"local_name": "GVH5127*",
|
||||
"connectable": false
|
||||
@@ -131,5 +135,5 @@
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/govee_ble",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["govee-ble==0.42.0"]
|
||||
"requirements": ["govee-ble==0.43.0"]
|
||||
}
|
||||
|
@@ -11,6 +11,7 @@ from typing import Any
|
||||
|
||||
from aiohttp import ClientError
|
||||
from habiticalib import (
|
||||
Avatar,
|
||||
ContentData,
|
||||
Habitica,
|
||||
HabiticaException,
|
||||
@@ -19,7 +20,6 @@ from habiticalib import (
|
||||
TaskFilter,
|
||||
TooManyRequestsError,
|
||||
UserData,
|
||||
UserStyles,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -159,12 +159,10 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
|
||||
else:
|
||||
await self.async_request_refresh()
|
||||
|
||||
async def generate_avatar(self, user_styles: UserStyles) -> bytes:
|
||||
async def generate_avatar(self, avatar: Avatar) -> bytes:
|
||||
"""Generate Avatar."""
|
||||
|
||||
avatar = BytesIO()
|
||||
await self.habitica.generate_avatar(
|
||||
fp=avatar, user_styles=user_styles, fmt="PNG"
|
||||
)
|
||||
png = BytesIO()
|
||||
await self.habitica.generate_avatar(fp=png, avatar=avatar, fmt="PNG")
|
||||
|
||||
return avatar.getvalue()
|
||||
return png.getvalue()
|
||||
|
@@ -23,5 +23,5 @@ async def async_get_config_entry_diagnostics(
|
||||
CONF_URL: config_entry.data[CONF_URL],
|
||||
CONF_API_USER: config_entry.data[CONF_API_USER],
|
||||
},
|
||||
"habitica_data": habitica_data.to_dict()["data"],
|
||||
"habitica_data": habitica_data.to_dict(omit_none=False)["data"],
|
||||
}
|
||||
|
@@ -2,10 +2,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import asdict
|
||||
from enum import StrEnum
|
||||
|
||||
from habiticalib import UserStyles
|
||||
from habiticalib import Avatar, extract_avatar
|
||||
|
||||
from homeassistant.components.image import ImageEntity, ImageEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -45,7 +44,7 @@ class HabiticaImage(HabiticaBase, ImageEntity):
|
||||
translation_key=HabiticaImageEntity.AVATAR,
|
||||
)
|
||||
_attr_content_type = "image/png"
|
||||
_current_appearance: UserStyles | None = None
|
||||
_current_appearance: Avatar | None = None
|
||||
_cache: bytes | None = None
|
||||
|
||||
def __init__(
|
||||
@@ -60,7 +59,7 @@ class HabiticaImage(HabiticaBase, ImageEntity):
|
||||
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Check if equipped gear and other things have changed since last avatar image generation."""
|
||||
new_appearance = UserStyles.from_dict(asdict(self.coordinator.data.user))
|
||||
new_appearance = extract_avatar(self.coordinator.data.user)
|
||||
|
||||
if self._current_appearance != new_appearance:
|
||||
self._current_appearance = new_appearance
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/habitica",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["habiticalib"],
|
||||
"requirements": ["habiticalib==0.3.4"]
|
||||
"requirements": ["habiticalib==0.3.7"]
|
||||
}
|
||||
|
@@ -77,7 +77,7 @@ SERVICE_API_CALL_SCHEMA = vol.Schema(
|
||||
|
||||
SERVICE_CAST_SKILL_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
|
||||
vol.Required(ATTR_SKILL): cv.string,
|
||||
vol.Optional(ATTR_TASK): cv.string,
|
||||
}
|
||||
@@ -85,12 +85,12 @@ SERVICE_CAST_SKILL_SCHEMA = vol.Schema(
|
||||
|
||||
SERVICE_MANAGE_QUEST_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
|
||||
}
|
||||
)
|
||||
SERVICE_SCORE_TASK_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
|
||||
vol.Required(ATTR_TASK): cv.string,
|
||||
vol.Optional(ATTR_DIRECTION): cv.string,
|
||||
}
|
||||
@@ -98,7 +98,7 @@ SERVICE_SCORE_TASK_SCHEMA = vol.Schema(
|
||||
|
||||
SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
|
||||
vol.Required(ATTR_ITEM): cv.string,
|
||||
vol.Required(ATTR_TARGET): cv.string,
|
||||
}
|
||||
@@ -106,7 +106,7 @@ SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(
|
||||
|
||||
SERVICE_GET_TASKS_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
|
||||
vol.Optional(ATTR_TYPE): vol.All(
|
||||
cv.ensure_list, [vol.All(vol.Upper, vol.In({x.name for x in TaskType}))]
|
||||
),
|
||||
@@ -510,7 +510,10 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
|
||||
or (task.notes and keyword in task.notes.lower())
|
||||
or any(keyword in item.text.lower() for item in task.checklist)
|
||||
]
|
||||
result: dict[str, Any] = {"tasks": response}
|
||||
result: dict[str, Any] = {
|
||||
"tasks": [task.to_dict(omit_none=False) for task in response]
|
||||
}
|
||||
|
||||
return result
|
||||
|
||||
hass.services.async_register(
|
||||
|
@@ -37,11 +37,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
|
||||
for device in device_registry.devices.get_devices_for_config_entry_id(
|
||||
entry.entry_id
|
||||
):
|
||||
for domain, player_id in device.identifiers:
|
||||
if domain == DOMAIN and not isinstance(player_id, str):
|
||||
device_registry.async_update_device( # type: ignore[unreachable]
|
||||
device.id, new_identifiers={(DOMAIN, str(player_id))}
|
||||
for ident in device.identifiers:
|
||||
if ident[0] != DOMAIN or isinstance(ident[1], str):
|
||||
continue
|
||||
|
||||
player_id = int(ident[1]) # type: ignore[unreachable]
|
||||
|
||||
# Create set of identifiers excluding this integration
|
||||
identifiers = {ident for ident in device.identifiers if ident[0] != DOMAIN}
|
||||
migrated_identifiers = {(DOMAIN, str(player_id))}
|
||||
# Add migrated if not already present in another device, which occurs if the user downgraded and then upgraded
|
||||
if not device_registry.async_get_device(migrated_identifiers):
|
||||
identifiers.update(migrated_identifiers)
|
||||
if len(identifiers) > 0:
|
||||
device_registry.async_update_device(
|
||||
device.id, new_identifiers=identifiers
|
||||
)
|
||||
else:
|
||||
device_registry.async_remove_device(device.id)
|
||||
break
|
||||
|
||||
coordinator = HeosCoordinator(hass, entry)
|
||||
|
@@ -8,7 +8,7 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyheos"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["pyheos==1.0.1"],
|
||||
"requirements": ["pyheos==1.0.2"],
|
||||
"single_config_entry": true,
|
||||
"ssdp": [
|
||||
{
|
||||
|
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/holiday",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["holidays==0.65", "babel==2.15.0"]
|
||||
"requirements": ["holidays==0.66", "babel==2.15.0"]
|
||||
}
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/hydrawise",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pydrawise"],
|
||||
"requirements": ["pydrawise==2025.1.0"]
|
||||
"requirements": ["pydrawise==2025.2.0"]
|
||||
}
|
||||
|
@@ -10,8 +10,8 @@ from lacrosse_view import HTTPError, LaCrosse, Location, LoginError, Sensor
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import SCAN_INTERVAL
|
||||
|
||||
@@ -26,6 +26,7 @@ class LaCrosseUpdateCoordinator(DataUpdateCoordinator[list[Sensor]]):
|
||||
name: str
|
||||
id: str
|
||||
hass: HomeAssistant
|
||||
devices: list[Sensor] | None = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -60,24 +61,34 @@ class LaCrosseUpdateCoordinator(DataUpdateCoordinator[list[Sensor]]):
|
||||
except LoginError as error:
|
||||
raise ConfigEntryAuthFailed from error
|
||||
|
||||
if self.devices is None:
|
||||
_LOGGER.debug("Getting devices")
|
||||
try:
|
||||
self.devices = await self.api.get_devices(
|
||||
location=Location(id=self.id, name=self.name),
|
||||
)
|
||||
except HTTPError as error:
|
||||
raise UpdateFailed from error
|
||||
|
||||
try:
|
||||
# Fetch last hour of data
|
||||
sensors = await self.api.get_sensors(
|
||||
location=Location(id=self.id, name=self.name),
|
||||
tz=self.hass.config.time_zone,
|
||||
start=str(now - 3600),
|
||||
end=str(now),
|
||||
)
|
||||
except HTTPError as error:
|
||||
raise ConfigEntryNotReady from error
|
||||
for sensor in self.devices:
|
||||
sensor.data = (
|
||||
await self.api.get_sensor_status(
|
||||
sensor=sensor,
|
||||
tz=self.hass.config.time_zone,
|
||||
)
|
||||
)["data"]["current"]
|
||||
_LOGGER.debug("Got data: %s", sensor.data)
|
||||
|
||||
_LOGGER.debug("Got data: %s", sensors)
|
||||
except HTTPError as error:
|
||||
raise UpdateFailed from error
|
||||
|
||||
# Verify that we have permission to read the sensors
|
||||
for sensor in sensors:
|
||||
for sensor in self.devices:
|
||||
if not sensor.permissions.get("read", False):
|
||||
raise ConfigEntryAuthFailed(
|
||||
f"This account does not have permission to read {sensor.name}"
|
||||
)
|
||||
|
||||
return sensors
|
||||
return self.devices
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/lacrosse_view",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["lacrosse_view"],
|
||||
"requirements": ["lacrosse-view==1.0.4"]
|
||||
"requirements": ["lacrosse-view==1.1.1"]
|
||||
}
|
||||
|
@@ -45,10 +45,10 @@ class LaCrosseSensorEntityDescription(SensorEntityDescription):
|
||||
|
||||
def get_value(sensor: Sensor, field: str) -> float | int | str | None:
|
||||
"""Get the value of a sensor field."""
|
||||
field_data = sensor.data.get(field)
|
||||
field_data = sensor.data.get(field) if sensor.data is not None else None
|
||||
if field_data is None:
|
||||
return None
|
||||
value = field_data["values"][-1]["s"]
|
||||
value = field_data["spot"]["value"]
|
||||
try:
|
||||
value = float(value)
|
||||
except ValueError:
|
||||
@@ -178,7 +178,7 @@ async def async_setup_entry(
|
||||
continue
|
||||
|
||||
# if the API returns a different unit of measurement from the description, update it
|
||||
if sensor.data.get(field) is not None:
|
||||
if sensor.data is not None and sensor.data.get(field) is not None:
|
||||
native_unit_of_measurement = UNIT_OF_MEASUREMENT_MAP.get(
|
||||
sensor.data[field].get("unit")
|
||||
)
|
||||
@@ -240,7 +240,9 @@ class LaCrosseViewSensor(
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
data = self.coordinator.data[self.index].data
|
||||
return (
|
||||
super().available
|
||||
and self.entity_description.key in self.coordinator.data[self.index].data
|
||||
and data is not None
|
||||
and self.entity_description.key in data
|
||||
)
|
||||
|
@@ -277,20 +277,6 @@ FOUR_GROUP_REMOTE_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
|
||||
}
|
||||
)
|
||||
|
||||
PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LIP = {
|
||||
"button_0": 2,
|
||||
"button_2": 4,
|
||||
}
|
||||
PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LEAP = {
|
||||
"button_0": 0,
|
||||
"button_2": 2,
|
||||
}
|
||||
PADDLE_SWITCH_PICO_TRIGGER_SCHEMA = LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_SUBTYPE): vol.In(PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LIP),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
DEVICE_TYPE_SCHEMA_MAP = {
|
||||
"Pico2Button": PICO_2_BUTTON_TRIGGER_SCHEMA,
|
||||
@@ -302,7 +288,6 @@ DEVICE_TYPE_SCHEMA_MAP = {
|
||||
"Pico4ButtonZone": PICO_4_BUTTON_ZONE_TRIGGER_SCHEMA,
|
||||
"Pico4Button2Group": PICO_4_BUTTON_2_GROUP_TRIGGER_SCHEMA,
|
||||
"FourGroupRemote": FOUR_GROUP_REMOTE_TRIGGER_SCHEMA,
|
||||
"PaddleSwitchPico": PADDLE_SWITCH_PICO_TRIGGER_SCHEMA,
|
||||
}
|
||||
|
||||
DEVICE_TYPE_SUBTYPE_MAP_TO_LIP = {
|
||||
@@ -315,7 +300,6 @@ DEVICE_TYPE_SUBTYPE_MAP_TO_LIP = {
|
||||
"Pico4ButtonZone": PICO_4_BUTTON_ZONE_BUTTON_TYPES_TO_LIP,
|
||||
"Pico4Button2Group": PICO_4_BUTTON_2_GROUP_BUTTON_TYPES_TO_LIP,
|
||||
"FourGroupRemote": FOUR_GROUP_REMOTE_BUTTON_TYPES_TO_LIP,
|
||||
"PaddleSwitchPico": PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LIP,
|
||||
}
|
||||
|
||||
DEVICE_TYPE_SUBTYPE_MAP_TO_LEAP = {
|
||||
@@ -328,7 +312,6 @@ DEVICE_TYPE_SUBTYPE_MAP_TO_LEAP = {
|
||||
"Pico4ButtonZone": PICO_4_BUTTON_ZONE_BUTTON_TYPES_TO_LEAP,
|
||||
"Pico4Button2Group": PICO_4_BUTTON_2_GROUP_BUTTON_TYPES_TO_LEAP,
|
||||
"FourGroupRemote": FOUR_GROUP_REMOTE_BUTTON_TYPES_TO_LEAP,
|
||||
"PaddleSwitchPico": PADDLE_SWITCH_PICO_BUTTON_TYPES_TO_LEAP,
|
||||
}
|
||||
|
||||
LEAP_TO_DEVICE_TYPE_SUBTYPE_MAP: dict[str, dict[int, str]] = {
|
||||
@@ -343,7 +326,6 @@ TRIGGER_SCHEMA = vol.Any(
|
||||
PICO_4_BUTTON_ZONE_TRIGGER_SCHEMA,
|
||||
PICO_4_BUTTON_2_GROUP_TRIGGER_SCHEMA,
|
||||
FOUR_GROUP_REMOTE_TRIGGER_SCHEMA,
|
||||
PADDLE_SWITCH_PICO_TRIGGER_SCHEMA,
|
||||
)
|
||||
|
||||
|
||||
|
@@ -105,10 +105,8 @@ class MillHeater(MillBaseEntity, ClimateEntity):
|
||||
self, coordinator: MillDataUpdateCoordinator, device: mill.Heater
|
||||
) -> None:
|
||||
"""Initialize the thermostat."""
|
||||
|
||||
super().__init__(coordinator, device)
|
||||
self._attr_unique_id = device.device_id
|
||||
self._update_attr(device)
|
||||
super().__init__(coordinator, device)
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
|
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from abc import abstractmethod
|
||||
|
||||
from mill import Heater, MillDevice
|
||||
from mill import MillDevice
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
@@ -45,7 +45,7 @@ class MillBaseEntity(CoordinatorEntity[MillDataUpdateCoordinator]):
|
||||
|
||||
@abstractmethod
|
||||
@callback
|
||||
def _update_attr(self, device: MillDevice | Heater) -> None:
|
||||
def _update_attr(self, device: MillDevice) -> None:
|
||||
"""Update the attribute of the entity."""
|
||||
|
||||
@property
|
||||
|
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from mill import MillDevice
|
||||
from mill import Heater, MillDevice
|
||||
|
||||
from homeassistant.components.number import NumberDeviceClass, NumberEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -27,6 +27,7 @@ async def async_setup_entry(
|
||||
async_add_entities(
|
||||
MillNumber(mill_data_coordinator, mill_device)
|
||||
for mill_device in mill_data_coordinator.data.values()
|
||||
if isinstance(mill_device, Heater)
|
||||
)
|
||||
|
||||
|
||||
@@ -45,9 +46,8 @@ class MillNumber(MillBaseEntity, NumberEntity):
|
||||
mill_device: MillDevice,
|
||||
) -> None:
|
||||
"""Initialize the number."""
|
||||
super().__init__(coordinator, mill_device)
|
||||
self._attr_unique_id = f"{mill_device.device_id}_max_heating_power"
|
||||
self._update_attr(mill_device)
|
||||
super().__init__(coordinator, mill_device)
|
||||
|
||||
@callback
|
||||
def _update_attr(self, device: MillDevice) -> None:
|
||||
|
@@ -192,9 +192,9 @@ class MillSensor(MillBaseEntity, SensorEntity):
|
||||
mill_device: mill.Socket | mill.Heater,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator, mill_device)
|
||||
self.entity_description = entity_description
|
||||
self._attr_unique_id = f"{mill_device.device_id}_{entity_description.key}"
|
||||
super().__init__(coordinator, mill_device)
|
||||
|
||||
@callback
|
||||
def _update_attr(self, device):
|
||||
|
@@ -236,7 +236,7 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
MQTT_PUBLISH_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_TOPIC): valid_publish_topic,
|
||||
vol.Required(ATTR_PAYLOAD): cv.string,
|
||||
vol.Required(ATTR_PAYLOAD, default=None): vol.Any(cv.string, None),
|
||||
vol.Optional(ATTR_EVALUATE_PAYLOAD): cv.boolean,
|
||||
vol.Optional(ATTR_QOS, default=DEFAULT_QOS): valid_qos_schema,
|
||||
vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
|
||||
|
@@ -8,7 +8,6 @@ publish:
|
||||
selector:
|
||||
text:
|
||||
payload:
|
||||
required: true
|
||||
example: "The temperature is {{ states('sensor.temperature') }}"
|
||||
selector:
|
||||
template:
|
||||
|
@@ -246,11 +246,7 @@
|
||||
},
|
||||
"payload": {
|
||||
"name": "Payload",
|
||||
"description": "The payload to publish."
|
||||
},
|
||||
"payload_template": {
|
||||
"name": "Payload template",
|
||||
"description": "Template to render as a payload value. If a payload is provided, the template is ignored."
|
||||
"description": "The payload to publish. Publishes an empty message if not provided."
|
||||
},
|
||||
"qos": {
|
||||
"name": "QoS",
|
||||
|
@@ -8,7 +8,7 @@ from collections.abc import Awaitable, Callable
|
||||
from http import HTTPStatus
|
||||
import logging
|
||||
|
||||
from aiohttp import web
|
||||
from aiohttp import ClientError, ClientResponseError, web
|
||||
from google_nest_sdm.camera_traits import CameraClipPreviewTrait
|
||||
from google_nest_sdm.device import Device
|
||||
from google_nest_sdm.event import EventMessage
|
||||
@@ -198,7 +198,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: NestConfigEntry) -> bool
|
||||
entry, unique_id=entry.data[CONF_PROJECT_ID]
|
||||
)
|
||||
|
||||
subscriber = await api.new_subscriber(hass, entry)
|
||||
auth = await api.new_auth(hass, entry)
|
||||
try:
|
||||
await auth.async_get_access_token()
|
||||
except ClientResponseError as err:
|
||||
if 400 <= err.status < 500:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
raise ConfigEntryNotReady from err
|
||||
except ClientError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
|
||||
subscriber = await api.new_subscriber(hass, entry, auth)
|
||||
if not subscriber:
|
||||
return False
|
||||
# Keep media for last N events in memory
|
||||
|
@@ -50,13 +50,14 @@ class AsyncConfigEntryAuth(AbstractAuth):
|
||||
return cast(str, self._oauth_session.token["access_token"])
|
||||
|
||||
async def async_get_creds(self) -> Credentials:
|
||||
"""Return an OAuth credential for Pub/Sub Subscriber."""
|
||||
# We don't have a way for Home Assistant to refresh creds on behalf
|
||||
# of the google pub/sub subscriber. Instead, build a full
|
||||
# Credentials object with enough information for the subscriber to
|
||||
# handle this on its own. We purposely don't refresh the token here
|
||||
# even when it is expired to fully hand off this responsibility and
|
||||
# know it is working at startup (then if not, fail loudly).
|
||||
"""Return an OAuth credential for Pub/Sub Subscriber.
|
||||
|
||||
The subscriber will call this when connecting to the stream to refresh
|
||||
the token. We construct a credentials object using the underlying
|
||||
OAuth2Session since the subscriber may expect the expiry fields to
|
||||
be present.
|
||||
"""
|
||||
await self.async_get_access_token()
|
||||
token = self._oauth_session.token
|
||||
creds = Credentials( # type: ignore[no-untyped-call]
|
||||
token=token["access_token"],
|
||||
@@ -101,9 +102,7 @@ class AccessTokenAuthImpl(AbstractAuth):
|
||||
)
|
||||
|
||||
|
||||
async def new_subscriber(
|
||||
hass: HomeAssistant, entry: NestConfigEntry
|
||||
) -> GoogleNestSubscriber | None:
|
||||
async def new_auth(hass: HomeAssistant, entry: NestConfigEntry) -> AbstractAuth:
|
||||
"""Create a GoogleNestSubscriber."""
|
||||
implementation = (
|
||||
await config_entry_oauth2_flow.async_get_config_entry_implementation(
|
||||
@@ -114,14 +113,22 @@ async def new_subscriber(
|
||||
implementation, config_entry_oauth2_flow.LocalOAuth2Implementation
|
||||
):
|
||||
raise TypeError(f"Unexpected auth implementation {implementation}")
|
||||
if (subscription_name := entry.data.get(CONF_SUBSCRIPTION_NAME)) is None:
|
||||
subscription_name = entry.data[CONF_SUBSCRIBER_ID]
|
||||
auth = AsyncConfigEntryAuth(
|
||||
return AsyncConfigEntryAuth(
|
||||
aiohttp_client.async_get_clientsession(hass),
|
||||
config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation),
|
||||
implementation.client_id,
|
||||
implementation.client_secret,
|
||||
)
|
||||
|
||||
|
||||
async def new_subscriber(
|
||||
hass: HomeAssistant,
|
||||
entry: NestConfigEntry,
|
||||
auth: AbstractAuth,
|
||||
) -> GoogleNestSubscriber:
|
||||
"""Create a GoogleNestSubscriber."""
|
||||
if (subscription_name := entry.data.get(CONF_SUBSCRIPTION_NAME)) is None:
|
||||
subscription_name = entry.data[CONF_SUBSCRIBER_ID]
|
||||
return GoogleNestSubscriber(auth, entry.data[CONF_PROJECT_ID], subscription_name)
|
||||
|
||||
|
||||
|
@@ -19,5 +19,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/nest",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["google_nest_sdm"],
|
||||
"requirements": ["google-nest-sdm==7.1.1"]
|
||||
"requirements": ["google-nest-sdm==7.1.3"]
|
||||
}
|
||||
|
@@ -41,7 +41,7 @@ ATTR_CURRENCY = "currency"
|
||||
SERVICE_GET_PRICES_FOR_DATE = "get_prices_for_date"
|
||||
SERVICE_GET_PRICES_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
|
||||
vol.Required(ATTR_DATE): cv.date,
|
||||
vol.Optional(ATTR_AREAS): vol.All(vol.In(list(AREAS)), cv.ensure_list, [str]),
|
||||
vol.Optional(ATTR_CURRENCY): vol.All(
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["ohme==1.2.8"]
|
||||
"requirements": ["ohme==1.2.9"]
|
||||
}
|
||||
|
@@ -4,6 +4,8 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
from html import unescape
|
||||
from json import dumps, loads
|
||||
import logging
|
||||
from typing import cast
|
||||
|
||||
@@ -13,6 +15,7 @@ from onedrive_personal_sdk.exceptions import (
|
||||
HttpRequestException,
|
||||
OneDriveException,
|
||||
)
|
||||
from onedrive_personal_sdk.models.items import ItemUpdate
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN
|
||||
@@ -45,7 +48,6 @@ _LOGGER = logging.getLogger(__name__)
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) -> bool:
|
||||
"""Set up OneDrive from a config entry."""
|
||||
implementation = await async_get_config_entry_implementation(hass, entry)
|
||||
|
||||
session = OAuth2Session(hass, entry, implementation)
|
||||
|
||||
async def get_access_token() -> str:
|
||||
@@ -89,6 +91,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) ->
|
||||
backup_folder_id=backup_folder.id,
|
||||
)
|
||||
|
||||
try:
|
||||
await _migrate_backup_files(client, backup_folder.id)
|
||||
except OneDriveException as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_to_migrate_files",
|
||||
) from err
|
||||
|
||||
_async_notify_backup_listeners_soon(hass)
|
||||
|
||||
return True
|
||||
@@ -108,3 +118,34 @@ def _async_notify_backup_listeners(hass: HomeAssistant) -> None:
|
||||
@callback
|
||||
def _async_notify_backup_listeners_soon(hass: HomeAssistant) -> None:
|
||||
hass.loop.call_soon(_async_notify_backup_listeners, hass)
|
||||
|
||||
|
||||
async def _migrate_backup_files(client: OneDriveClient, backup_folder_id: str) -> None:
|
||||
"""Migrate backup files to metadata version 2."""
|
||||
files = await client.list_drive_items(backup_folder_id)
|
||||
for file in files:
|
||||
if file.description and '"metadata_version": 1' in (
|
||||
metadata_json := unescape(file.description)
|
||||
):
|
||||
metadata = loads(metadata_json)
|
||||
del metadata["metadata_version"]
|
||||
metadata_filename = file.name.rsplit(".", 1)[0] + ".metadata.json"
|
||||
metadata_file = await client.upload_file(
|
||||
backup_folder_id,
|
||||
metadata_filename,
|
||||
dumps(metadata),
|
||||
)
|
||||
metadata_description = {
|
||||
"metadata_version": 2,
|
||||
"backup_id": metadata["backup_id"],
|
||||
"backup_file_id": file.id,
|
||||
}
|
||||
await client.update_drive_item(
|
||||
path_or_id=metadata_file.id,
|
||||
data=ItemUpdate(description=dumps(metadata_description)),
|
||||
)
|
||||
await client.update_drive_item(
|
||||
path_or_id=file.id,
|
||||
data=ItemUpdate(description=""),
|
||||
)
|
||||
_LOGGER.debug("Migrated backup file %s", file.name)
|
||||
|
@@ -4,8 +4,8 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import AsyncIterator, Callable, Coroutine
|
||||
from functools import wraps
|
||||
import html
|
||||
import json
|
||||
from html import unescape
|
||||
from json import dumps, loads
|
||||
import logging
|
||||
from typing import Any, Concatenate
|
||||
|
||||
@@ -34,6 +34,7 @@ from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
UPLOAD_CHUNK_SIZE = 16 * 320 * 1024 # 5.2MB
|
||||
TIMEOUT = ClientTimeout(connect=10, total=43200) # 12 hours
|
||||
METADATA_VERSION = 2
|
||||
|
||||
|
||||
async def async_get_backup_agents(
|
||||
@@ -120,11 +121,19 @@ class OneDriveBackupAgent(BackupAgent):
|
||||
self, backup_id: str, **kwargs: Any
|
||||
) -> AsyncIterator[bytes]:
|
||||
"""Download a backup file."""
|
||||
item = await self._find_item_by_backup_id(backup_id)
|
||||
if item is None:
|
||||
metadata_item = await self._find_item_by_backup_id(backup_id)
|
||||
if (
|
||||
metadata_item is None
|
||||
or metadata_item.description is None
|
||||
or "backup_file_id" not in metadata_item.description
|
||||
):
|
||||
raise BackupAgentError("Backup not found")
|
||||
|
||||
stream = await self._client.download_drive_item(item.id, timeout=TIMEOUT)
|
||||
metadata_info = loads(unescape(metadata_item.description))
|
||||
|
||||
stream = await self._client.download_drive_item(
|
||||
metadata_info["backup_file_id"], timeout=TIMEOUT
|
||||
)
|
||||
return stream.iter_chunked(1024)
|
||||
|
||||
@handle_backup_errors
|
||||
@@ -136,15 +145,15 @@ class OneDriveBackupAgent(BackupAgent):
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Upload a backup."""
|
||||
|
||||
filename = suggested_filename(backup)
|
||||
file = FileInfo(
|
||||
suggested_filename(backup),
|
||||
filename,
|
||||
backup.size,
|
||||
self._folder_id,
|
||||
await open_stream(),
|
||||
)
|
||||
try:
|
||||
item = await LargeFileUploadClient.upload(
|
||||
backup_file = await LargeFileUploadClient.upload(
|
||||
self._token_function, file, session=async_get_clientsession(self._hass)
|
||||
)
|
||||
except HashMismatchError as err:
|
||||
@@ -152,15 +161,25 @@ class OneDriveBackupAgent(BackupAgent):
|
||||
"Hash validation failed, backup file might be corrupt"
|
||||
) from err
|
||||
|
||||
# store metadata in description
|
||||
backup_dict = backup.as_dict()
|
||||
backup_dict["metadata_version"] = 1 # version of the backup metadata
|
||||
description = json.dumps(backup_dict)
|
||||
# store metadata in metadata file
|
||||
description = dumps(backup.as_dict())
|
||||
_LOGGER.debug("Creating metadata: %s", description)
|
||||
metadata_filename = filename.rsplit(".", 1)[0] + ".metadata.json"
|
||||
metadata_file = await self._client.upload_file(
|
||||
self._folder_id,
|
||||
metadata_filename,
|
||||
description,
|
||||
)
|
||||
|
||||
# add metadata to the metadata file
|
||||
metadata_description = {
|
||||
"metadata_version": METADATA_VERSION,
|
||||
"backup_id": backup.backup_id,
|
||||
"backup_file_id": backup_file.id,
|
||||
}
|
||||
await self._client.update_drive_item(
|
||||
path_or_id=item.id,
|
||||
data=ItemUpdate(description=description),
|
||||
path_or_id=metadata_file.id,
|
||||
data=ItemUpdate(description=dumps(metadata_description)),
|
||||
)
|
||||
|
||||
@handle_backup_errors
|
||||
@@ -170,18 +189,28 @@ class OneDriveBackupAgent(BackupAgent):
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Delete a backup file."""
|
||||
item = await self._find_item_by_backup_id(backup_id)
|
||||
if item is None:
|
||||
metadata_item = await self._find_item_by_backup_id(backup_id)
|
||||
if (
|
||||
metadata_item is None
|
||||
or metadata_item.description is None
|
||||
or "backup_file_id" not in metadata_item.description
|
||||
):
|
||||
return
|
||||
await self._client.delete_drive_item(item.id)
|
||||
metadata_info = loads(unescape(metadata_item.description))
|
||||
|
||||
await self._client.delete_drive_item(metadata_info["backup_file_id"])
|
||||
await self._client.delete_drive_item(metadata_item.id)
|
||||
|
||||
@handle_backup_errors
|
||||
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
|
||||
"""List backups."""
|
||||
items = await self._client.list_drive_items(self._folder_id)
|
||||
return [
|
||||
self._backup_from_description(item.description)
|
||||
for item in await self._client.list_drive_items(self._folder_id)
|
||||
if item.description and "homeassistant_version" in item.description
|
||||
await self._download_backup_metadata(item.id)
|
||||
for item in items
|
||||
if item.description
|
||||
and "backup_id" in item.description
|
||||
and f'"metadata_version": {METADATA_VERSION}' in unescape(item.description)
|
||||
]
|
||||
|
||||
@handle_backup_errors
|
||||
@@ -189,19 +218,11 @@ class OneDriveBackupAgent(BackupAgent):
|
||||
self, backup_id: str, **kwargs: Any
|
||||
) -> AgentBackup | None:
|
||||
"""Return a backup."""
|
||||
item = await self._find_item_by_backup_id(backup_id)
|
||||
return (
|
||||
self._backup_from_description(item.description)
|
||||
if item and item.description
|
||||
else None
|
||||
)
|
||||
metadata_file = await self._find_item_by_backup_id(backup_id)
|
||||
if metadata_file is None or metadata_file.description is None:
|
||||
return None
|
||||
|
||||
def _backup_from_description(self, description: str) -> AgentBackup:
|
||||
"""Create a backup object from a description."""
|
||||
description = html.unescape(
|
||||
description
|
||||
) # OneDrive encodes the description on save automatically
|
||||
return AgentBackup.from_dict(json.loads(description))
|
||||
return await self._download_backup_metadata(metadata_file.id)
|
||||
|
||||
async def _find_item_by_backup_id(self, backup_id: str) -> File | Folder | None:
|
||||
"""Find an item by backup ID."""
|
||||
@@ -209,7 +230,15 @@ class OneDriveBackupAgent(BackupAgent):
|
||||
(
|
||||
item
|
||||
for item in await self._client.list_drive_items(self._folder_id)
|
||||
if item.description and backup_id in item.description
|
||||
if item.description
|
||||
and backup_id in item.description
|
||||
and f'"metadata_version": {METADATA_VERSION}'
|
||||
in unescape(item.description)
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
async def _download_backup_metadata(self, item_id: str) -> AgentBackup:
|
||||
metadata_stream = await self._client.download_drive_item(item_id)
|
||||
metadata_json = loads(await metadata_stream.read())
|
||||
return AgentBackup.from_dict(metadata_json)
|
||||
|
@@ -9,5 +9,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["onedrive_personal_sdk"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["onedrive-personal-sdk==0.0.8"]
|
||||
"requirements": ["onedrive-personal-sdk==0.0.10"]
|
||||
}
|
||||
|
@@ -35,6 +35,9 @@
|
||||
},
|
||||
"failed_to_get_folder": {
|
||||
"message": "Failed to get {folder} folder"
|
||||
},
|
||||
"failed_to_migrate_files": {
|
||||
"message": "Failed to migrate metadata to separate files"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -92,7 +92,7 @@ SUPPORT_ONKYO = (
|
||||
DEFAULT_PLAYABLE_SOURCES = (
|
||||
InputSource.from_meaning("FM"),
|
||||
InputSource.from_meaning("AM"),
|
||||
InputSource.from_meaning("TUNER"),
|
||||
InputSource.from_meaning("DAB"),
|
||||
)
|
||||
|
||||
ATTR_PRESET = "preset"
|
||||
|
@@ -27,7 +27,7 @@ REGISTERED_NOTIFICATIONS = (
|
||||
JSON_PAYLOAD = (
|
||||
'"{\\"notification_type\\":\\"{{notification_type}}\\",\\"subject\\":\\"{{subject}'
|
||||
'}\\",\\"message\\":\\"{{message}}\\",\\"image\\":\\"{{image}}\\",\\"{{media}}\\":'
|
||||
'{\\"media_type\\":\\"{{media_type}}\\",\\"tmdb_idd\\":\\"{{media_tmdbid}}\\",\\"t'
|
||||
'{\\"media_type\\":\\"{{media_type}}\\",\\"tmdb_id\\":\\"{{media_tmdbid}}\\",\\"t'
|
||||
'vdb_id\\":\\"{{media_tvdbid}}\\",\\"status\\":\\"{{media_status}}\\",\\"status4k'
|
||||
'\\":\\"{{media_status4k}}\\"},\\"{{request}}\\":{\\"request_id\\":\\"{{request_id'
|
||||
'}}\\",\\"requested_by_email\\":\\"{{requestedBy_email}}\\",\\"requested_by_userna'
|
||||
|
@@ -19,5 +19,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["reolink_aio"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["reolink-aio==0.11.9"]
|
||||
"requirements": ["reolink-aio==0.11.10"]
|
||||
}
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/sentry",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["sentry-sdk==1.40.3"]
|
||||
"requirements": ["sentry-sdk==1.45.1"]
|
||||
}
|
||||
|
@@ -8,7 +8,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aioshelly"],
|
||||
"requirements": ["aioshelly==12.3.2"],
|
||||
"requirements": ["aioshelly==12.4.2"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"type": "_http._tcp.local.",
|
||||
|
@@ -139,6 +139,24 @@ class RpcBluTrvNumber(RpcNumber):
|
||||
)
|
||||
|
||||
|
||||
class RpcBluTrvExtTempNumber(RpcBluTrvNumber):
|
||||
"""Represent a RPC BluTrv External Temperature number."""
|
||||
|
||||
_reported_value: float | None = None
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return value of number."""
|
||||
return self._reported_value
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Change the value."""
|
||||
await super().async_set_native_value(value)
|
||||
|
||||
self._reported_value = value
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
NUMBERS: dict[tuple[str, str], BlockNumberDescription] = {
|
||||
("device", "valvePos"): BlockNumberDescription(
|
||||
key="device|valvepos",
|
||||
@@ -175,7 +193,7 @@ RPC_NUMBERS: Final = {
|
||||
"method": "Trv.SetExternalTemperature",
|
||||
"params": {"id": 0, "t_C": value},
|
||||
},
|
||||
entity_class=RpcBluTrvNumber,
|
||||
entity_class=RpcBluTrvExtTempNumber,
|
||||
),
|
||||
"number": RpcNumberDescription(
|
||||
key="number",
|
||||
|
@@ -7,7 +7,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/synology_dsm",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["synology_dsm"],
|
||||
"requirements": ["py-synologydsm-api==2.6.0"],
|
||||
"requirements": ["py-synologydsm-api==2.6.2"],
|
||||
"ssdp": [
|
||||
{
|
||||
"manufacturer": "Synology",
|
||||
|
@@ -175,6 +175,7 @@ BASE_SERVICE_SCHEMA = vol.Schema(
|
||||
vol.Optional(ATTR_KEYBOARD_INLINE): cv.ensure_list,
|
||||
vol.Optional(ATTR_TIMEOUT): cv.positive_int,
|
||||
vol.Optional(ATTR_MESSAGE_TAG): cv.string,
|
||||
vol.Optional(ATTR_MESSAGE_THREAD_ID): vol.Coerce(int),
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
@@ -216,6 +217,7 @@ SERVICE_SCHEMA_SEND_POLL = vol.Schema(
|
||||
vol.Optional(ATTR_ALLOWS_MULTIPLE_ANSWERS, default=False): cv.boolean,
|
||||
vol.Optional(ATTR_DISABLE_NOTIF): cv.boolean,
|
||||
vol.Optional(ATTR_TIMEOUT): cv.positive_int,
|
||||
vol.Optional(ATTR_MESSAGE_THREAD_ID): vol.Coerce(int),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -754,7 +756,8 @@ class TelegramNotificationService:
|
||||
message_thread_id=params[ATTR_MESSAGE_THREAD_ID],
|
||||
context=context,
|
||||
)
|
||||
msg_ids[chat_id] = msg.id
|
||||
if msg is not None:
|
||||
msg_ids[chat_id] = msg.id
|
||||
return msg_ids
|
||||
|
||||
async def delete_message(self, chat_id=None, context=None, **kwargs):
|
||||
|
@@ -9,6 +9,7 @@ import logging
|
||||
from kasa import AuthenticationError, Credentials, Device, KasaException
|
||||
from kasa.iot import IotStrip
|
||||
|
||||
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
@@ -123,11 +124,14 @@ class TPLinkDataUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
def get_child_coordinator(
|
||||
self,
|
||||
child: Device,
|
||||
platform_domain: str,
|
||||
) -> TPLinkDataUpdateCoordinator:
|
||||
"""Get separate child coordinator for a device or self if not needed."""
|
||||
# The iot HS300 allows a limited number of concurrent requests and fetching the
|
||||
# emeter information requires separate ones so create child coordinators here.
|
||||
if isinstance(self.device, IotStrip):
|
||||
# This does not happen for switches as the state is available on the
|
||||
# parent device info.
|
||||
if isinstance(self.device, IotStrip) and platform_domain != SWITCH_DOMAIN:
|
||||
if not (child_coordinator := self._child_coordinators.get(child.device_id)):
|
||||
# The child coordinators only update energy data so we can
|
||||
# set a longer update interval to avoid flooding the device
|
||||
|
@@ -508,7 +508,9 @@ class CoordinatedTPLinkFeatureEntity(CoordinatedTPLinkEntity, ABC):
|
||||
)
|
||||
|
||||
for child in children:
|
||||
child_coordinator = coordinator.get_child_coordinator(child)
|
||||
child_coordinator = coordinator.get_child_coordinator(
|
||||
child, platform_domain
|
||||
)
|
||||
|
||||
child_entities = cls._entities_for_device(
|
||||
hass,
|
||||
@@ -651,7 +653,9 @@ class CoordinatedTPLinkModuleEntity(CoordinatedTPLinkEntity, ABC):
|
||||
device.host,
|
||||
)
|
||||
for child in children:
|
||||
child_coordinator = coordinator.get_child_coordinator(child)
|
||||
child_coordinator = coordinator.get_child_coordinator(
|
||||
child, platform_domain
|
||||
)
|
||||
|
||||
child_entities: list[_E] = cls._entities_for_device(
|
||||
hass,
|
||||
|
@@ -78,7 +78,9 @@ MIGRATION_NAME_TO_KEY = {
|
||||
|
||||
SERVICE_BASE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ENTRY_ID): selector.ConfigEntrySelector(),
|
||||
vol.Required(CONF_ENTRY_ID): selector.ConfigEntrySelector(
|
||||
{"integration": DOMAIN}
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
@@ -31,6 +31,7 @@ WEBOSTV_EXCEPTIONS = (
|
||||
WebOsTvCommandError,
|
||||
aiohttp.ClientConnectorError,
|
||||
aiohttp.ServerDisconnectedError,
|
||||
aiohttp.WSMessageTypeError,
|
||||
asyncio.CancelledError,
|
||||
asyncio.TimeoutError,
|
||||
)
|
||||
|
@@ -125,7 +125,7 @@ def cmd[_R, **_P](
|
||||
self: LgWebOSMediaPlayerEntity, *args: _P.args, **kwargs: _P.kwargs
|
||||
) -> _R:
|
||||
"""Wrap all command methods."""
|
||||
if self.state is MediaPlayerState.OFF:
|
||||
if self.state is MediaPlayerState.OFF and func.__name__ != "async_turn_off":
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="device_off",
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["holidays"],
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["holidays==0.65"]
|
||||
"requirements": ["holidays==0.66"]
|
||||
}
|
||||
|
@@ -8,5 +8,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["zeroconf"],
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["zeroconf==0.143.0"]
|
||||
"requirements": ["zeroconf==0.144.1"]
|
||||
}
|
||||
|
@@ -113,9 +113,14 @@ async def list_serial_ports(hass: HomeAssistant) -> list[ListPortInfo]:
|
||||
except HomeAssistantError:
|
||||
pass
|
||||
else:
|
||||
yellow_radio = next(p for p in ports if p.device == "/dev/ttyAMA1")
|
||||
yellow_radio.description = "Yellow Zigbee module"
|
||||
yellow_radio.manufacturer = "Nabu Casa"
|
||||
# PySerial does not properly handle the Yellow's serial port with the CM5
|
||||
# so we manually include it
|
||||
port = ListPortInfo(device="/dev/ttyAMA1", skip_link_detection=True)
|
||||
port.description = "Yellow Zigbee module"
|
||||
port.manufacturer = "Nabu Casa"
|
||||
|
||||
ports = [p for p in ports if not p.device.startswith("/dev/ttyAMA")]
|
||||
ports.insert(0, port)
|
||||
|
||||
if is_hassio(hass):
|
||||
# Present the multi-PAN addon as a setup option, if it's available
|
||||
|
@@ -21,7 +21,7 @@
|
||||
"zha",
|
||||
"universal_silabs_flasher"
|
||||
],
|
||||
"requirements": ["zha==0.0.47"],
|
||||
"requirements": ["zha==0.0.48"],
|
||||
"usb": [
|
||||
{
|
||||
"vid": "10C4",
|
||||
|
@@ -25,7 +25,7 @@ if TYPE_CHECKING:
|
||||
APPLICATION_NAME: Final = "HomeAssistant"
|
||||
MAJOR_VERSION: Final = 2025
|
||||
MINOR_VERSION: Final = 2
|
||||
PATCH_VERSION: Final = "0"
|
||||
PATCH_VERSION: Final = "3"
|
||||
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
|
||||
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
|
||||
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 0)
|
||||
|
5
homeassistant/generated/bluetooth.py
generated
5
homeassistant/generated/bluetooth.py
generated
@@ -187,6 +187,11 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [
|
||||
"domain": "govee_ble",
|
||||
"local_name": "GV5126*",
|
||||
},
|
||||
{
|
||||
"connectable": False,
|
||||
"domain": "govee_ble",
|
||||
"local_name": "GV5179*",
|
||||
},
|
||||
{
|
||||
"connectable": False,
|
||||
"domain": "govee_ble",
|
||||
|
@@ -15,7 +15,7 @@ import aiohttp
|
||||
from aiohttp import web
|
||||
from aiohttp.hdrs import CONTENT_TYPE, USER_AGENT
|
||||
from aiohttp.web_exceptions import HTTPBadGateway, HTTPGatewayTimeout
|
||||
from aiohttp_asyncmdnsresolver.api import AsyncMDNSResolver
|
||||
from aiohttp_asyncmdnsresolver.api import AsyncDualMDNSResolver
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components import zeroconf
|
||||
@@ -377,5 +377,5 @@ def _async_get_connector(
|
||||
|
||||
|
||||
@callback
|
||||
def _async_make_resolver(hass: HomeAssistant) -> AsyncMDNSResolver:
|
||||
return AsyncMDNSResolver(async_zeroconf=zeroconf.async_get_async_zeroconf(hass))
|
||||
def _async_make_resolver(hass: HomeAssistant) -> AsyncDualMDNSResolver:
|
||||
return AsyncDualMDNSResolver(async_zeroconf=zeroconf.async_get_async_zeroconf(hass))
|
||||
|
@@ -4,9 +4,9 @@ aiodhcpwatcher==1.0.3
|
||||
aiodiscover==2.1.0
|
||||
aiodns==3.2.0
|
||||
aiohasupervisor==0.3.0
|
||||
aiohttp-asyncmdnsresolver==0.0.3
|
||||
aiohttp-asyncmdnsresolver==0.1.0
|
||||
aiohttp-fast-zlib==0.2.0
|
||||
aiohttp==3.11.11
|
||||
aiohttp==3.11.12
|
||||
aiohttp_cors==0.7.0
|
||||
aiousbwatcher==1.1.1
|
||||
aiozoneinfo==0.2.1
|
||||
@@ -28,16 +28,16 @@ cached-ipaddress==0.8.0
|
||||
certifi>=2021.5.30
|
||||
ciso8601==2.3.2
|
||||
cronsim==2.6
|
||||
cryptography==44.0.0
|
||||
cryptography==44.0.1
|
||||
dbus-fast==2.33.0
|
||||
fnv-hash-fast==1.2.2
|
||||
go2rtc-client==0.1.2
|
||||
ha-ffmpeg==3.2.2
|
||||
habluetooth==3.21.1
|
||||
hass-nabucasa==0.88.1
|
||||
hass-nabucasa==0.90.0
|
||||
hassil==2.2.3
|
||||
home-assistant-bluetooth==1.13.0
|
||||
home-assistant-frontend==20250205.0
|
||||
home-assistant-frontend==20250210.0
|
||||
home-assistant-intents==2025.2.5
|
||||
httpx==0.28.1
|
||||
ifaddr==0.2.0
|
||||
@@ -73,7 +73,7 @@ voluptuous-serialize==2.6.0
|
||||
voluptuous==0.15.2
|
||||
webrtc-models==0.3.0
|
||||
yarl==1.18.3
|
||||
zeroconf==0.143.0
|
||||
zeroconf==0.144.1
|
||||
|
||||
# Constrain pycryptodome to avoid vulnerability
|
||||
# see https://github.com/home-assistant/core/pull/16238
|
||||
|
@@ -132,7 +132,13 @@ def async_set_domains_to_be_loaded(hass: core.HomeAssistant, domains: set[str])
|
||||
- Keep track of domains which will load but have not yet finished loading
|
||||
"""
|
||||
setup_done_futures = hass.data.setdefault(DATA_SETUP_DONE, {})
|
||||
setup_done_futures.update({domain: hass.loop.create_future() for domain in domains})
|
||||
setup_futures = hass.data.setdefault(DATA_SETUP, {})
|
||||
old_domains = set(setup_futures) | set(setup_done_futures) | hass.config.components
|
||||
if overlap := old_domains & domains:
|
||||
_LOGGER.debug("Domains to be loaded %s already loaded or pending", overlap)
|
||||
setup_done_futures.update(
|
||||
{domain: hass.loop.create_future() for domain in domains - old_domains}
|
||||
)
|
||||
|
||||
|
||||
def setup_component(hass: core.HomeAssistant, domain: str, config: ConfigType) -> bool:
|
||||
|
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "homeassistant"
|
||||
version = "2025.2.0"
|
||||
version = "2025.2.3"
|
||||
license = {text = "Apache-2.0"}
|
||||
description = "Open-source home automation platform running on Python 3."
|
||||
readme = "README.rst"
|
||||
@@ -28,10 +28,10 @@ dependencies = [
|
||||
# change behavior based on presence of supervisor. Deprecated with #127228
|
||||
# Lib can be removed with 2025.11
|
||||
"aiohasupervisor==0.3.0",
|
||||
"aiohttp==3.11.11",
|
||||
"aiohttp==3.11.12",
|
||||
"aiohttp_cors==0.7.0",
|
||||
"aiohttp-fast-zlib==0.2.0",
|
||||
"aiohttp-asyncmdnsresolver==0.0.3",
|
||||
"aiohttp-asyncmdnsresolver==0.1.0",
|
||||
"aiozoneinfo==0.2.1",
|
||||
"astral==2.2",
|
||||
"async-interrupt==1.2.0",
|
||||
@@ -46,7 +46,7 @@ dependencies = [
|
||||
"fnv-hash-fast==1.2.2",
|
||||
# hass-nabucasa is imported by helpers which don't depend on the cloud
|
||||
# integration
|
||||
"hass-nabucasa==0.88.1",
|
||||
"hass-nabucasa==0.90.0",
|
||||
# When bumping httpx, please check the version pins of
|
||||
# httpcore, anyio, and h11 in gen_requirements_all
|
||||
"httpx==0.28.1",
|
||||
@@ -56,7 +56,7 @@ dependencies = [
|
||||
"lru-dict==1.3.0",
|
||||
"PyJWT==2.10.1",
|
||||
# PyJWT has loose dependency. We want the latest one.
|
||||
"cryptography==44.0.0",
|
||||
"cryptography==44.0.1",
|
||||
"Pillow==11.1.0",
|
||||
"propcache==0.2.1",
|
||||
"pyOpenSSL==24.3.0",
|
||||
@@ -82,7 +82,7 @@ dependencies = [
|
||||
"voluptuous-openapi==0.0.6",
|
||||
"yarl==1.18.3",
|
||||
"webrtc-models==0.3.0",
|
||||
"zeroconf==0.143.0"
|
||||
"zeroconf==0.144.1"
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
|
10
requirements.txt
generated
10
requirements.txt
generated
@@ -5,10 +5,10 @@
|
||||
# Home Assistant Core
|
||||
aiodns==3.2.0
|
||||
aiohasupervisor==0.3.0
|
||||
aiohttp==3.11.11
|
||||
aiohttp==3.11.12
|
||||
aiohttp_cors==0.7.0
|
||||
aiohttp-fast-zlib==0.2.0
|
||||
aiohttp-asyncmdnsresolver==0.0.3
|
||||
aiohttp-asyncmdnsresolver==0.1.0
|
||||
aiozoneinfo==0.2.1
|
||||
astral==2.2
|
||||
async-interrupt==1.2.0
|
||||
@@ -21,14 +21,14 @@ certifi>=2021.5.30
|
||||
ciso8601==2.3.2
|
||||
cronsim==2.6
|
||||
fnv-hash-fast==1.2.2
|
||||
hass-nabucasa==0.88.1
|
||||
hass-nabucasa==0.90.0
|
||||
httpx==0.28.1
|
||||
home-assistant-bluetooth==1.13.0
|
||||
ifaddr==0.2.0
|
||||
Jinja2==3.1.5
|
||||
lru-dict==1.3.0
|
||||
PyJWT==2.10.1
|
||||
cryptography==44.0.0
|
||||
cryptography==44.0.1
|
||||
Pillow==11.1.0
|
||||
propcache==0.2.1
|
||||
pyOpenSSL==24.3.0
|
||||
@@ -51,4 +51,4 @@ voluptuous-serialize==2.6.0
|
||||
voluptuous-openapi==0.0.6
|
||||
yarl==1.18.3
|
||||
webrtc-models==0.3.0
|
||||
zeroconf==0.143.0
|
||||
zeroconf==0.144.1
|
||||
|
46
requirements_all.txt
generated
46
requirements_all.txt
generated
@@ -368,7 +368,7 @@ aioruuvigateway==0.1.0
|
||||
aiosenz==1.0.0
|
||||
|
||||
# homeassistant.components.shelly
|
||||
aioshelly==12.3.2
|
||||
aioshelly==12.4.2
|
||||
|
||||
# homeassistant.components.skybell
|
||||
aioskybell==22.7.0
|
||||
@@ -747,7 +747,7 @@ debugpy==1.8.11
|
||||
# decora==0.6
|
||||
|
||||
# homeassistant.components.ecovacs
|
||||
deebot-client==12.0.0
|
||||
deebot-client==12.1.0
|
||||
|
||||
# homeassistant.components.ihc
|
||||
# homeassistant.components.namecheapdns
|
||||
@@ -818,10 +818,10 @@ ebusdpy==0.0.17
|
||||
ecoaliface==0.4.0
|
||||
|
||||
# homeassistant.components.eheimdigital
|
||||
eheimdigital==1.0.5
|
||||
eheimdigital==1.0.6
|
||||
|
||||
# homeassistant.components.electric_kiwi
|
||||
electrickiwi-api==0.8.5
|
||||
electrickiwi-api==0.9.14
|
||||
|
||||
# homeassistant.components.elevenlabs
|
||||
elevenlabs==1.9.0
|
||||
@@ -1033,7 +1033,7 @@ google-cloud-texttospeech==2.17.2
|
||||
google-generativeai==0.8.2
|
||||
|
||||
# homeassistant.components.nest
|
||||
google-nest-sdm==7.1.1
|
||||
google-nest-sdm==7.1.3
|
||||
|
||||
# homeassistant.components.google_photos
|
||||
google-photos-library-api==0.12.1
|
||||
@@ -1049,7 +1049,7 @@ goslide-api==0.7.0
|
||||
gotailwind==0.3.0
|
||||
|
||||
# homeassistant.components.govee_ble
|
||||
govee-ble==0.42.0
|
||||
govee-ble==0.43.0
|
||||
|
||||
# homeassistant.components.govee_light_local
|
||||
govee-local-api==1.5.3
|
||||
@@ -1097,13 +1097,13 @@ ha-iotawattpy==0.1.2
|
||||
ha-philipsjs==3.2.2
|
||||
|
||||
# homeassistant.components.habitica
|
||||
habiticalib==0.3.4
|
||||
habiticalib==0.3.7
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
habluetooth==3.21.1
|
||||
|
||||
# homeassistant.components.cloud
|
||||
hass-nabucasa==0.88.1
|
||||
hass-nabucasa==0.90.0
|
||||
|
||||
# homeassistant.components.splunk
|
||||
hass-splunk==0.1.1
|
||||
@@ -1140,10 +1140,10 @@ hole==0.8.0
|
||||
|
||||
# homeassistant.components.holiday
|
||||
# homeassistant.components.workday
|
||||
holidays==0.65
|
||||
holidays==0.66
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20250205.0
|
||||
home-assistant-frontend==20250210.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2025.2.5
|
||||
@@ -1281,7 +1281,7 @@ konnected==1.2.0
|
||||
krakenex==2.2.2
|
||||
|
||||
# homeassistant.components.lacrosse_view
|
||||
lacrosse-view==1.0.4
|
||||
lacrosse-view==1.1.1
|
||||
|
||||
# homeassistant.components.eufy
|
||||
lakeside==0.13
|
||||
@@ -1544,7 +1544,7 @@ odp-amsterdam==6.0.2
|
||||
oemthermostat==1.1.1
|
||||
|
||||
# homeassistant.components.ohme
|
||||
ohme==1.2.8
|
||||
ohme==1.2.9
|
||||
|
||||
# homeassistant.components.ollama
|
||||
ollama==0.4.7
|
||||
@@ -1556,7 +1556,7 @@ omnilogic==0.4.5
|
||||
ondilo==0.5.0
|
||||
|
||||
# homeassistant.components.onedrive
|
||||
onedrive-personal-sdk==0.0.8
|
||||
onedrive-personal-sdk==0.0.10
|
||||
|
||||
# homeassistant.components.onvif
|
||||
onvif-zeep-async==3.2.5
|
||||
@@ -1746,7 +1746,7 @@ py-schluter==0.1.7
|
||||
py-sucks==0.9.10
|
||||
|
||||
# homeassistant.components.synology_dsm
|
||||
py-synologydsm-api==2.6.0
|
||||
py-synologydsm-api==2.6.2
|
||||
|
||||
# homeassistant.components.atome
|
||||
pyAtome==0.1.1
|
||||
@@ -1897,7 +1897,7 @@ pydiscovergy==3.0.2
|
||||
pydoods==1.0.2
|
||||
|
||||
# homeassistant.components.hydrawise
|
||||
pydrawise==2025.1.0
|
||||
pydrawise==2025.2.0
|
||||
|
||||
# homeassistant.components.android_ip_webcam
|
||||
pydroid-ipcam==2.0.0
|
||||
@@ -1930,7 +1930,7 @@ pyeiscp==0.0.7
|
||||
pyemoncms==0.1.1
|
||||
|
||||
# homeassistant.components.enphase_envoy
|
||||
pyenphase==1.23.1
|
||||
pyenphase==1.25.1
|
||||
|
||||
# homeassistant.components.envisalink
|
||||
pyenvisalink==4.7
|
||||
@@ -1954,7 +1954,7 @@ pyfibaro==0.8.0
|
||||
pyfido==2.1.2
|
||||
|
||||
# homeassistant.components.fireservicerota
|
||||
pyfireservicerota==0.0.43
|
||||
pyfireservicerota==0.0.46
|
||||
|
||||
# homeassistant.components.flic
|
||||
pyflic==2.0.4
|
||||
@@ -1987,7 +1987,7 @@ pygti==0.9.4
|
||||
pyhaversion==22.8.0
|
||||
|
||||
# homeassistant.components.heos
|
||||
pyheos==1.0.1
|
||||
pyheos==1.0.2
|
||||
|
||||
# homeassistant.components.hive
|
||||
pyhive-integration==1.0.1
|
||||
@@ -2385,7 +2385,7 @@ python-gc100==1.0.3a0
|
||||
python-gitlab==1.6.0
|
||||
|
||||
# homeassistant.components.google_drive
|
||||
python-google-drive-api==0.0.2
|
||||
python-google-drive-api==0.1.0
|
||||
|
||||
# homeassistant.components.analytics_insights
|
||||
python-homeassistant-analytics==0.8.1
|
||||
@@ -2603,7 +2603,7 @@ renault-api==0.2.9
|
||||
renson-endura-delta==1.7.2
|
||||
|
||||
# homeassistant.components.reolink
|
||||
reolink-aio==0.11.9
|
||||
reolink-aio==0.11.10
|
||||
|
||||
# homeassistant.components.idteck_prox
|
||||
rfk101py==0.0.1
|
||||
@@ -2694,7 +2694,7 @@ sensorpush-ble==1.7.1
|
||||
sensoterra==2.0.1
|
||||
|
||||
# homeassistant.components.sentry
|
||||
sentry-sdk==1.40.3
|
||||
sentry-sdk==1.45.1
|
||||
|
||||
# homeassistant.components.sfr_box
|
||||
sfrbox-api==0.0.11
|
||||
@@ -3125,13 +3125,13 @@ zamg==0.3.6
|
||||
zengge==0.2
|
||||
|
||||
# homeassistant.components.zeroconf
|
||||
zeroconf==0.143.0
|
||||
zeroconf==0.144.1
|
||||
|
||||
# homeassistant.components.zeversolar
|
||||
zeversolar==0.3.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zha==0.0.47
|
||||
zha==0.0.48
|
||||
|
||||
# homeassistant.components.zhong_hong
|
||||
zhong-hong-hvac==1.0.13
|
||||
|
46
requirements_test_all.txt
generated
46
requirements_test_all.txt
generated
@@ -350,7 +350,7 @@ aioruuvigateway==0.1.0
|
||||
aiosenz==1.0.0
|
||||
|
||||
# homeassistant.components.shelly
|
||||
aioshelly==12.3.2
|
||||
aioshelly==12.4.2
|
||||
|
||||
# homeassistant.components.skybell
|
||||
aioskybell==22.7.0
|
||||
@@ -637,7 +637,7 @@ dbus-fast==2.33.0
|
||||
debugpy==1.8.11
|
||||
|
||||
# homeassistant.components.ecovacs
|
||||
deebot-client==12.0.0
|
||||
deebot-client==12.1.0
|
||||
|
||||
# homeassistant.components.ihc
|
||||
# homeassistant.components.namecheapdns
|
||||
@@ -696,10 +696,10 @@ eagle100==0.1.1
|
||||
easyenergy==2.1.2
|
||||
|
||||
# homeassistant.components.eheimdigital
|
||||
eheimdigital==1.0.5
|
||||
eheimdigital==1.0.6
|
||||
|
||||
# homeassistant.components.electric_kiwi
|
||||
electrickiwi-api==0.8.5
|
||||
electrickiwi-api==0.9.14
|
||||
|
||||
# homeassistant.components.elevenlabs
|
||||
elevenlabs==1.9.0
|
||||
@@ -883,7 +883,7 @@ google-cloud-texttospeech==2.17.2
|
||||
google-generativeai==0.8.2
|
||||
|
||||
# homeassistant.components.nest
|
||||
google-nest-sdm==7.1.1
|
||||
google-nest-sdm==7.1.3
|
||||
|
||||
# homeassistant.components.google_photos
|
||||
google-photos-library-api==0.12.1
|
||||
@@ -899,7 +899,7 @@ goslide-api==0.7.0
|
||||
gotailwind==0.3.0
|
||||
|
||||
# homeassistant.components.govee_ble
|
||||
govee-ble==0.42.0
|
||||
govee-ble==0.43.0
|
||||
|
||||
# homeassistant.components.govee_light_local
|
||||
govee-local-api==1.5.3
|
||||
@@ -938,13 +938,13 @@ ha-iotawattpy==0.1.2
|
||||
ha-philipsjs==3.2.2
|
||||
|
||||
# homeassistant.components.habitica
|
||||
habiticalib==0.3.4
|
||||
habiticalib==0.3.7
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
habluetooth==3.21.1
|
||||
|
||||
# homeassistant.components.cloud
|
||||
hass-nabucasa==0.88.1
|
||||
hass-nabucasa==0.90.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
hassil==2.2.3
|
||||
@@ -969,10 +969,10 @@ hole==0.8.0
|
||||
|
||||
# homeassistant.components.holiday
|
||||
# homeassistant.components.workday
|
||||
holidays==0.65
|
||||
holidays==0.66
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20250205.0
|
||||
home-assistant-frontend==20250210.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2025.2.5
|
||||
@@ -1083,7 +1083,7 @@ konnected==1.2.0
|
||||
krakenex==2.2.2
|
||||
|
||||
# homeassistant.components.lacrosse_view
|
||||
lacrosse-view==1.0.4
|
||||
lacrosse-view==1.1.1
|
||||
|
||||
# homeassistant.components.laundrify
|
||||
laundrify-aio==1.2.2
|
||||
@@ -1292,7 +1292,7 @@ objgraph==3.5.0
|
||||
odp-amsterdam==6.0.2
|
||||
|
||||
# homeassistant.components.ohme
|
||||
ohme==1.2.8
|
||||
ohme==1.2.9
|
||||
|
||||
# homeassistant.components.ollama
|
||||
ollama==0.4.7
|
||||
@@ -1304,7 +1304,7 @@ omnilogic==0.4.5
|
||||
ondilo==0.5.0
|
||||
|
||||
# homeassistant.components.onedrive
|
||||
onedrive-personal-sdk==0.0.8
|
||||
onedrive-personal-sdk==0.0.10
|
||||
|
||||
# homeassistant.components.onvif
|
||||
onvif-zeep-async==3.2.5
|
||||
@@ -1444,7 +1444,7 @@ py-nightscout==1.2.2
|
||||
py-sucks==0.9.10
|
||||
|
||||
# homeassistant.components.synology_dsm
|
||||
py-synologydsm-api==2.6.0
|
||||
py-synologydsm-api==2.6.2
|
||||
|
||||
# homeassistant.components.hdmi_cec
|
||||
pyCEC==0.5.2
|
||||
@@ -1547,7 +1547,7 @@ pydexcom==0.2.3
|
||||
pydiscovergy==3.0.2
|
||||
|
||||
# homeassistant.components.hydrawise
|
||||
pydrawise==2025.1.0
|
||||
pydrawise==2025.2.0
|
||||
|
||||
# homeassistant.components.android_ip_webcam
|
||||
pydroid-ipcam==2.0.0
|
||||
@@ -1574,7 +1574,7 @@ pyeiscp==0.0.7
|
||||
pyemoncms==0.1.1
|
||||
|
||||
# homeassistant.components.enphase_envoy
|
||||
pyenphase==1.23.1
|
||||
pyenphase==1.25.1
|
||||
|
||||
# homeassistant.components.everlights
|
||||
pyeverlights==0.1.0
|
||||
@@ -1592,7 +1592,7 @@ pyfibaro==0.8.0
|
||||
pyfido==2.1.2
|
||||
|
||||
# homeassistant.components.fireservicerota
|
||||
pyfireservicerota==0.0.43
|
||||
pyfireservicerota==0.0.46
|
||||
|
||||
# homeassistant.components.flic
|
||||
pyflic==2.0.4
|
||||
@@ -1616,7 +1616,7 @@ pygti==0.9.4
|
||||
pyhaversion==22.8.0
|
||||
|
||||
# homeassistant.components.heos
|
||||
pyheos==1.0.1
|
||||
pyheos==1.0.2
|
||||
|
||||
# homeassistant.components.hive
|
||||
pyhive-integration==1.0.1
|
||||
@@ -1930,7 +1930,7 @@ python-fullykiosk==0.0.14
|
||||
# python-gammu==3.2.4
|
||||
|
||||
# homeassistant.components.google_drive
|
||||
python-google-drive-api==0.0.2
|
||||
python-google-drive-api==0.1.0
|
||||
|
||||
# homeassistant.components.analytics_insights
|
||||
python-homeassistant-analytics==0.8.1
|
||||
@@ -2106,7 +2106,7 @@ renault-api==0.2.9
|
||||
renson-endura-delta==1.7.2
|
||||
|
||||
# homeassistant.components.reolink
|
||||
reolink-aio==0.11.9
|
||||
reolink-aio==0.11.10
|
||||
|
||||
# homeassistant.components.rflink
|
||||
rflink==0.0.66
|
||||
@@ -2173,7 +2173,7 @@ sensorpush-ble==1.7.1
|
||||
sensoterra==2.0.1
|
||||
|
||||
# homeassistant.components.sentry
|
||||
sentry-sdk==1.40.3
|
||||
sentry-sdk==1.45.1
|
||||
|
||||
# homeassistant.components.sfr_box
|
||||
sfrbox-api==0.0.11
|
||||
@@ -2514,13 +2514,13 @@ yt-dlp[default]==2025.01.26
|
||||
zamg==0.3.6
|
||||
|
||||
# homeassistant.components.zeroconf
|
||||
zeroconf==0.143.0
|
||||
zeroconf==0.144.1
|
||||
|
||||
# homeassistant.components.zeversolar
|
||||
zeversolar==0.3.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zha==0.0.47
|
||||
zha==0.0.48
|
||||
|
||||
# homeassistant.components.zwave_js
|
||||
zwave-js-server-python==0.60.0
|
||||
|
@@ -9,7 +9,7 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import intent
|
||||
|
||||
from .conftest import MockAssistSatellite
|
||||
from .conftest import TEST_DOMAIN, MockAssistSatellite
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -65,12 +65,7 @@ async def test_broadcast_intent(
|
||||
},
|
||||
"language": "en",
|
||||
"response_type": "action_done",
|
||||
"speech": {
|
||||
"plain": {
|
||||
"extra_data": None,
|
||||
"speech": "Done",
|
||||
}
|
||||
},
|
||||
"speech": {}, # response comes from intents
|
||||
}
|
||||
assert len(entity.announcements) == 1
|
||||
assert len(entity2.announcements) == 1
|
||||
@@ -99,12 +94,37 @@ async def test_broadcast_intent(
|
||||
},
|
||||
"language": "en",
|
||||
"response_type": "action_done",
|
||||
"speech": {
|
||||
"plain": {
|
||||
"extra_data": None,
|
||||
"speech": "Done",
|
||||
}
|
||||
},
|
||||
"speech": {}, # response comes from intents
|
||||
}
|
||||
assert len(entity.announcements) == 1
|
||||
assert len(entity2.announcements) == 2
|
||||
|
||||
|
||||
async def test_broadcast_intent_excluded_domains(
|
||||
hass: HomeAssistant,
|
||||
init_components: ConfigEntry,
|
||||
entity: MockAssistSatellite,
|
||||
entity2: MockAssistSatellite,
|
||||
mock_tts: None,
|
||||
) -> None:
|
||||
"""Test that the broadcast intent filters out entities in excluded domains."""
|
||||
|
||||
# Exclude the "test" domain
|
||||
with patch(
|
||||
"homeassistant.components.assist_satellite.intent.EXCLUDED_DOMAINS",
|
||||
new={TEST_DOMAIN},
|
||||
):
|
||||
result = await intent.async_handle(
|
||||
hass, "test", intent.INTENT_BROADCAST, {"message": {"value": "Hello"}}
|
||||
)
|
||||
assert result.as_dict() == {
|
||||
"card": {},
|
||||
"data": {
|
||||
"failed": [],
|
||||
"success": [], # no satellites
|
||||
"targets": [],
|
||||
},
|
||||
"language": "en",
|
||||
"response_type": "action_done",
|
||||
"speech": {},
|
||||
}
|
||||
|
@@ -3697,12 +3697,13 @@
|
||||
# ---
|
||||
# name: test_delete_with_errors[side_effect1-storage_data0]
|
||||
dict({
|
||||
'error': dict({
|
||||
'code': 'home_assistant_error',
|
||||
'message': 'Boom!',
|
||||
}),
|
||||
'id': 1,
|
||||
'success': False,
|
||||
'result': dict({
|
||||
'agent_errors': dict({
|
||||
'domain.test': 'Boom!',
|
||||
}),
|
||||
}),
|
||||
'success': True,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
@@ -3757,12 +3758,13 @@
|
||||
# ---
|
||||
# name: test_delete_with_errors[side_effect1-storage_data1]
|
||||
dict({
|
||||
'error': dict({
|
||||
'code': 'home_assistant_error',
|
||||
'message': 'Boom!',
|
||||
}),
|
||||
'id': 1,
|
||||
'success': False,
|
||||
'result': dict({
|
||||
'agent_errors': dict({
|
||||
'domain.test': 'Boom!',
|
||||
}),
|
||||
}),
|
||||
'success': True,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
@@ -4019,12 +4021,89 @@
|
||||
# ---
|
||||
# name: test_details_with_errors[side_effect0]
|
||||
dict({
|
||||
'error': dict({
|
||||
'code': 'home_assistant_error',
|
||||
'message': 'Boom!',
|
||||
}),
|
||||
'id': 1,
|
||||
'success': False,
|
||||
'result': dict({
|
||||
'agent_errors': dict({
|
||||
'domain.test': 'Oops',
|
||||
}),
|
||||
'backup': dict({
|
||||
'addons': list([
|
||||
dict({
|
||||
'name': 'Test',
|
||||
'slug': 'test',
|
||||
'version': '1.0.0',
|
||||
}),
|
||||
]),
|
||||
'agents': dict({
|
||||
'backup.local': dict({
|
||||
'protected': False,
|
||||
'size': 0,
|
||||
}),
|
||||
}),
|
||||
'backup_id': 'abc123',
|
||||
'database_included': True,
|
||||
'date': '1970-01-01T00:00:00.000Z',
|
||||
'extra_metadata': dict({
|
||||
'instance_id': 'our_uuid',
|
||||
'with_automatic_settings': True,
|
||||
}),
|
||||
'failed_agent_ids': list([
|
||||
]),
|
||||
'folders': list([
|
||||
'media',
|
||||
'share',
|
||||
]),
|
||||
'homeassistant_included': True,
|
||||
'homeassistant_version': '2024.12.0',
|
||||
'name': 'Test',
|
||||
'with_automatic_settings': True,
|
||||
}),
|
||||
}),
|
||||
'success': True,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_details_with_errors[side_effect1]
|
||||
dict({
|
||||
'id': 1,
|
||||
'result': dict({
|
||||
'agent_errors': dict({
|
||||
'domain.test': 'Boom!',
|
||||
}),
|
||||
'backup': dict({
|
||||
'addons': list([
|
||||
dict({
|
||||
'name': 'Test',
|
||||
'slug': 'test',
|
||||
'version': '1.0.0',
|
||||
}),
|
||||
]),
|
||||
'agents': dict({
|
||||
'backup.local': dict({
|
||||
'protected': False,
|
||||
'size': 0,
|
||||
}),
|
||||
}),
|
||||
'backup_id': 'abc123',
|
||||
'database_included': True,
|
||||
'date': '1970-01-01T00:00:00.000Z',
|
||||
'extra_metadata': dict({
|
||||
'instance_id': 'our_uuid',
|
||||
'with_automatic_settings': True,
|
||||
}),
|
||||
'failed_agent_ids': list([
|
||||
]),
|
||||
'folders': list([
|
||||
'media',
|
||||
'share',
|
||||
]),
|
||||
'homeassistant_included': True,
|
||||
'homeassistant_version': '2024.12.0',
|
||||
'name': 'Test',
|
||||
'with_automatic_settings': True,
|
||||
}),
|
||||
}),
|
||||
'success': True,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
@@ -4542,12 +4621,105 @@
|
||||
# ---
|
||||
# name: test_info_with_errors[side_effect0]
|
||||
dict({
|
||||
'error': dict({
|
||||
'code': 'home_assistant_error',
|
||||
'message': 'Boom!',
|
||||
}),
|
||||
'id': 1,
|
||||
'success': False,
|
||||
'result': dict({
|
||||
'agent_errors': dict({
|
||||
'domain.test': 'Oops',
|
||||
}),
|
||||
'backups': list([
|
||||
dict({
|
||||
'addons': list([
|
||||
dict({
|
||||
'name': 'Test',
|
||||
'slug': 'test',
|
||||
'version': '1.0.0',
|
||||
}),
|
||||
]),
|
||||
'agents': dict({
|
||||
'backup.local': dict({
|
||||
'protected': False,
|
||||
'size': 0,
|
||||
}),
|
||||
}),
|
||||
'backup_id': 'abc123',
|
||||
'database_included': True,
|
||||
'date': '1970-01-01T00:00:00.000Z',
|
||||
'extra_metadata': dict({
|
||||
'instance_id': 'our_uuid',
|
||||
'with_automatic_settings': True,
|
||||
}),
|
||||
'failed_agent_ids': list([
|
||||
]),
|
||||
'folders': list([
|
||||
'media',
|
||||
'share',
|
||||
]),
|
||||
'homeassistant_included': True,
|
||||
'homeassistant_version': '2024.12.0',
|
||||
'name': 'Test',
|
||||
'with_automatic_settings': True,
|
||||
}),
|
||||
]),
|
||||
'last_attempted_automatic_backup': None,
|
||||
'last_completed_automatic_backup': None,
|
||||
'last_non_idle_event': None,
|
||||
'next_automatic_backup': None,
|
||||
'next_automatic_backup_additional': False,
|
||||
'state': 'idle',
|
||||
}),
|
||||
'success': True,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_info_with_errors[side_effect1]
|
||||
dict({
|
||||
'id': 1,
|
||||
'result': dict({
|
||||
'agent_errors': dict({
|
||||
'domain.test': 'Boom!',
|
||||
}),
|
||||
'backups': list([
|
||||
dict({
|
||||
'addons': list([
|
||||
dict({
|
||||
'name': 'Test',
|
||||
'slug': 'test',
|
||||
'version': '1.0.0',
|
||||
}),
|
||||
]),
|
||||
'agents': dict({
|
||||
'backup.local': dict({
|
||||
'protected': False,
|
||||
'size': 0,
|
||||
}),
|
||||
}),
|
||||
'backup_id': 'abc123',
|
||||
'database_included': True,
|
||||
'date': '1970-01-01T00:00:00.000Z',
|
||||
'extra_metadata': dict({
|
||||
'instance_id': 'our_uuid',
|
||||
'with_automatic_settings': True,
|
||||
}),
|
||||
'failed_agent_ids': list([
|
||||
]),
|
||||
'folders': list([
|
||||
'media',
|
||||
'share',
|
||||
]),
|
||||
'homeassistant_included': True,
|
||||
'homeassistant_version': '2024.12.0',
|
||||
'name': 'Test',
|
||||
'with_automatic_settings': True,
|
||||
}),
|
||||
]),
|
||||
'last_attempted_automatic_backup': None,
|
||||
'last_completed_automatic_backup': None,
|
||||
'last_non_idle_event': None,
|
||||
'next_automatic_backup': None,
|
||||
'next_automatic_backup_additional': False,
|
||||
'state': 'idle',
|
||||
}),
|
||||
'success': True,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
|
File diff suppressed because it is too large
Load Diff
@@ -9,6 +9,7 @@ from hass_nabucasa import Cloud
|
||||
from hass_nabucasa.auth import CognitoAuth
|
||||
from hass_nabucasa.cloudhooks import Cloudhooks
|
||||
from hass_nabucasa.const import DEFAULT_SERVERS, DEFAULT_VALUES, STATE_CONNECTED
|
||||
from hass_nabucasa.files import Files
|
||||
from hass_nabucasa.google_report_state import GoogleReportState
|
||||
from hass_nabucasa.ice_servers import IceServers
|
||||
from hass_nabucasa.iot import CloudIoT
|
||||
@@ -68,6 +69,7 @@ async def cloud_fixture() -> AsyncGenerator[MagicMock]:
|
||||
spec=CloudIoT, last_disconnect_reason=None, state=STATE_CONNECTED
|
||||
)
|
||||
mock_cloud.voice = MagicMock(spec=Voice)
|
||||
mock_cloud.files = MagicMock(spec=Files)
|
||||
mock_cloud.started = None
|
||||
mock_cloud.ice_servers = MagicMock(
|
||||
spec=IceServers,
|
||||
|
@@ -1,14 +1,15 @@
|
||||
"""Test the cloud backup platform."""
|
||||
|
||||
from collections.abc import AsyncGenerator, AsyncIterator, Generator
|
||||
from collections.abc import AsyncGenerator, Generator
|
||||
from io import StringIO
|
||||
from typing import Any
|
||||
from unittest.mock import Mock, PropertyMock, patch
|
||||
|
||||
from aiohttp import ClientError
|
||||
from hass_nabucasa import CloudError
|
||||
from hass_nabucasa.api import CloudApiNonRetryableError
|
||||
from hass_nabucasa.files import FilesError
|
||||
import pytest
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.components.backup import (
|
||||
DOMAIN as BACKUP_DOMAIN,
|
||||
@@ -22,11 +23,20 @@ from homeassistant.components.cloud.const import EVENT_CLOUD_EVENT
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.setup import async_setup_component
|
||||
from homeassistant.util.aiohttp import MockStreamReader
|
||||
|
||||
from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
from tests.typing import ClientSessionGenerator, MagicMock, WebSocketGenerator
|
||||
|
||||
|
||||
class MockStreamReaderChunked(MockStreamReader):
|
||||
"""Mock a stream reader with simulated chunked data."""
|
||||
|
||||
async def readchunk(self) -> tuple[bytes, bool]:
|
||||
"""Read bytes."""
|
||||
return (self._content.read(), False)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
async def setup_integration(
|
||||
hass: HomeAssistant,
|
||||
@@ -55,49 +65,6 @@ def mock_delete_file() -> Generator[MagicMock]:
|
||||
yield delete_file
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_get_download_details() -> Generator[MagicMock]:
|
||||
"""Mock list files."""
|
||||
with patch(
|
||||
"homeassistant.components.cloud.backup.async_files_download_details",
|
||||
spec_set=True,
|
||||
) as download_details:
|
||||
download_details.return_value = {
|
||||
"url": (
|
||||
"https://blabla.cloudflarestorage.com/blabla/backup/"
|
||||
"462e16810d6841228828d9dd2f9e341e.tar?X-Amz-Algorithm=blah"
|
||||
),
|
||||
}
|
||||
yield download_details
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_get_upload_details() -> Generator[MagicMock]:
|
||||
"""Mock list files."""
|
||||
with patch(
|
||||
"homeassistant.components.cloud.backup.async_files_upload_details",
|
||||
spec_set=True,
|
||||
) as download_details:
|
||||
download_details.return_value = {
|
||||
"url": (
|
||||
"https://blabla.cloudflarestorage.com/blabla/backup/"
|
||||
"ea5c969e492c49df89d432a1483b8dc3.tar?X-Amz-Algorithm=blah"
|
||||
),
|
||||
"headers": {
|
||||
"content-md5": "HOhSM3WZkpHRYGiz4YRGIQ==",
|
||||
"x-amz-meta-storage-type": "backup",
|
||||
"x-amz-meta-b64json": (
|
||||
"eyJhZGRvbnMiOltdLCJiYWNrdXBfaWQiOiJjNDNiNWU2MCIsImRhdGUiOiIyMDI0LT"
|
||||
"EyLTAzVDA0OjI1OjUwLjMyMDcwMy0wNTowMCIsImRhdGFiYXNlX2luY2x1ZGVkIjpm"
|
||||
"YWxzZSwiZm9sZGVycyI6W10sImhvbWVhc3Npc3RhbnRfaW5jbHVkZWQiOnRydWUsIm"
|
||||
"hvbWVhc3Npc3RhbnRfdmVyc2lvbiI6IjIwMjQuMTIuMC5kZXYwIiwibmFtZSI6ImVy"
|
||||
"aWsiLCJwcm90ZWN0ZWQiOnRydWUsInNpemUiOjM1NjI0OTYwfQ=="
|
||||
),
|
||||
},
|
||||
}
|
||||
yield download_details
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_list_files() -> Generator[MagicMock]:
|
||||
"""Mock list files."""
|
||||
@@ -264,52 +231,30 @@ async def test_agents_download(
|
||||
hass: HomeAssistant,
|
||||
hass_client: ClientSessionGenerator,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
mock_get_download_details: Mock,
|
||||
cloud: Mock,
|
||||
) -> None:
|
||||
"""Test agent download backup."""
|
||||
client = await hass_client()
|
||||
backup_id = "23e64aec"
|
||||
|
||||
aioclient_mock.get(
|
||||
mock_get_download_details.return_value["url"], content=b"backup data"
|
||||
)
|
||||
cloud.files.download.return_value = MockStreamReaderChunked(b"backup data")
|
||||
|
||||
resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud")
|
||||
assert resp.status == 200
|
||||
assert await resp.content.read() == b"backup data"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("side_effect", [ClientError, CloudError])
|
||||
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
|
||||
async def test_agents_download_fail_cloud(
|
||||
hass: HomeAssistant,
|
||||
hass_client: ClientSessionGenerator,
|
||||
mock_get_download_details: Mock,
|
||||
side_effect: Exception,
|
||||
) -> None:
|
||||
"""Test agent download backup, when cloud user is logged in."""
|
||||
client = await hass_client()
|
||||
backup_id = "23e64aec"
|
||||
mock_get_download_details.side_effect = side_effect
|
||||
|
||||
resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud")
|
||||
assert resp.status == 500
|
||||
content = await resp.content.read()
|
||||
assert "Failed to get download details" in content.decode()
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
|
||||
async def test_agents_download_fail_get(
|
||||
hass: HomeAssistant,
|
||||
hass_client: ClientSessionGenerator,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
mock_get_download_details: Mock,
|
||||
cloud: Mock,
|
||||
) -> None:
|
||||
"""Test agent download backup, when cloud user is logged in."""
|
||||
client = await hass_client()
|
||||
backup_id = "23e64aec"
|
||||
|
||||
aioclient_mock.get(mock_get_download_details.return_value["url"], status=500)
|
||||
cloud.files.download.side_effect = FilesError("Oh no :(")
|
||||
|
||||
resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud")
|
||||
assert resp.status == 500
|
||||
@@ -336,11 +281,11 @@ async def test_agents_upload(
|
||||
hass: HomeAssistant,
|
||||
hass_client: ClientSessionGenerator,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
mock_get_upload_details: Mock,
|
||||
cloud: Mock,
|
||||
) -> None:
|
||||
"""Test agent upload backup."""
|
||||
client = await hass_client()
|
||||
backup_data = "test"
|
||||
backup_id = "test-backup"
|
||||
test_backup = AgentBackup(
|
||||
addons=[AddonInfo(name="Test", slug="test", version="1.0.0")],
|
||||
@@ -353,10 +298,8 @@ async def test_agents_upload(
|
||||
homeassistant_version="2024.12.0",
|
||||
name="Test",
|
||||
protected=True,
|
||||
size=0,
|
||||
size=len(backup_data),
|
||||
)
|
||||
aioclient_mock.put(mock_get_upload_details.return_value["url"])
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
|
||||
@@ -367,37 +310,34 @@ async def test_agents_upload(
|
||||
),
|
||||
patch("pathlib.Path.open") as mocked_open,
|
||||
):
|
||||
mocked_open.return_value.read = Mock(side_effect=[b"test", b""])
|
||||
mocked_open.return_value.read = Mock(side_effect=[backup_data.encode(), b""])
|
||||
fetch_backup.return_value = test_backup
|
||||
resp = await client.post(
|
||||
"/api/backup/upload?agent_id=cloud.cloud",
|
||||
data={"file": StringIO("test")},
|
||||
data={"file": StringIO(backup_data)},
|
||||
)
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 1
|
||||
assert aioclient_mock.mock_calls[-1][0] == "PUT"
|
||||
assert aioclient_mock.mock_calls[-1][1] == URL(
|
||||
mock_get_upload_details.return_value["url"]
|
||||
)
|
||||
assert isinstance(aioclient_mock.mock_calls[-1][2], AsyncIterator)
|
||||
assert len(cloud.files.upload.mock_calls) == 1
|
||||
metadata = cloud.files.upload.mock_calls[-1].kwargs["metadata"]
|
||||
assert metadata["backup_id"] == backup_id
|
||||
|
||||
assert resp.status == 201
|
||||
assert f"Uploading backup {backup_id}" in caplog.text
|
||||
|
||||
|
||||
@pytest.mark.parametrize("put_mock_kwargs", [{"status": 500}, {"exc": TimeoutError}])
|
||||
@pytest.mark.parametrize("side_effect", [FilesError("Boom!"), CloudError("Boom!")])
|
||||
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
|
||||
async def test_agents_upload_fail_put(
|
||||
async def test_agents_upload_fail(
|
||||
hass: HomeAssistant,
|
||||
hass_client: ClientSessionGenerator,
|
||||
hass_storage: dict[str, Any],
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
mock_get_upload_details: Mock,
|
||||
put_mock_kwargs: dict[str, Any],
|
||||
side_effect: Exception,
|
||||
cloud: Mock,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test agent upload backup fails."""
|
||||
client = await hass_client()
|
||||
backup_data = "test"
|
||||
backup_id = "test-backup"
|
||||
test_backup = AgentBackup(
|
||||
addons=[AddonInfo(name="Test", slug="test", version="1.0.0")],
|
||||
@@ -410,9 +350,10 @@ async def test_agents_upload_fail_put(
|
||||
homeassistant_version="2024.12.0",
|
||||
name="Test",
|
||||
protected=True,
|
||||
size=0,
|
||||
size=len(backup_data),
|
||||
)
|
||||
aioclient_mock.put(mock_get_upload_details.return_value["url"], **put_mock_kwargs)
|
||||
|
||||
cloud.files.upload.side_effect = side_effect
|
||||
|
||||
with (
|
||||
patch(
|
||||
@@ -427,17 +368,17 @@ async def test_agents_upload_fail_put(
|
||||
patch("homeassistant.components.cloud.backup.random.randint", return_value=60),
|
||||
patch("homeassistant.components.cloud.backup._RETRY_LIMIT", 2),
|
||||
):
|
||||
mocked_open.return_value.read = Mock(side_effect=[b"test", b""])
|
||||
mocked_open.return_value.read = Mock(side_effect=[backup_data.encode(), b""])
|
||||
fetch_backup.return_value = test_backup
|
||||
resp = await client.post(
|
||||
"/api/backup/upload?agent_id=cloud.cloud",
|
||||
data={"file": StringIO("test")},
|
||||
data={"file": StringIO(backup_data)},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
assert "Failed to upload backup, retrying (2/2) in 60s" in caplog.text
|
||||
assert resp.status == 201
|
||||
assert cloud.files.upload.call_count == 2
|
||||
store_backups = hass_storage[BACKUP_DOMAIN]["data"]["backups"]
|
||||
assert len(store_backups) == 1
|
||||
stored_backup = store_backups[0]
|
||||
@@ -445,19 +386,33 @@ async def test_agents_upload_fail_put(
|
||||
assert stored_backup["failed_agent_ids"] == ["cloud.cloud"]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("side_effect", [ClientError, CloudError])
|
||||
@pytest.mark.usefixtures("cloud_logged_in")
|
||||
async def test_agents_upload_fail_cloud(
|
||||
@pytest.mark.parametrize(
|
||||
("side_effect", "logmsg"),
|
||||
[
|
||||
(
|
||||
CloudApiNonRetryableError("Boom!", code="NC-SH-FH-03"),
|
||||
"The backup size of 13.37GB is too large to be uploaded to Home Assistant Cloud",
|
||||
),
|
||||
(
|
||||
CloudApiNonRetryableError("Boom!", code="NC-CE-01"),
|
||||
"Failed to upload backup Boom!",
|
||||
),
|
||||
],
|
||||
)
|
||||
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
|
||||
async def test_agents_upload_fail_non_retryable(
|
||||
hass: HomeAssistant,
|
||||
hass_client: ClientSessionGenerator,
|
||||
hass_storage: dict[str, Any],
|
||||
mock_get_upload_details: Mock,
|
||||
side_effect: Exception,
|
||||
logmsg: str,
|
||||
cloud: Mock,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test agent upload backup, when cloud user is logged in."""
|
||||
"""Test agent upload backup fails with non-retryable error."""
|
||||
client = await hass_client()
|
||||
backup_data = "test"
|
||||
backup_id = "test-backup"
|
||||
mock_get_upload_details.side_effect = side_effect
|
||||
test_backup = AgentBackup(
|
||||
addons=[AddonInfo(name="Test", slug="test", version="1.0.0")],
|
||||
backup_id=backup_id,
|
||||
@@ -469,8 +424,11 @@ async def test_agents_upload_fail_cloud(
|
||||
homeassistant_version="2024.12.0",
|
||||
name="Test",
|
||||
protected=True,
|
||||
size=0,
|
||||
size=14358124749,
|
||||
)
|
||||
|
||||
cloud.files.upload.side_effect = side_effect
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
|
||||
@@ -480,17 +438,19 @@ async def test_agents_upload_fail_cloud(
|
||||
return_value=test_backup,
|
||||
),
|
||||
patch("pathlib.Path.open") as mocked_open,
|
||||
patch("homeassistant.components.cloud.backup.asyncio.sleep"),
|
||||
patch("homeassistant.components.cloud.backup.calculate_b64md5"),
|
||||
):
|
||||
mocked_open.return_value.read = Mock(side_effect=[b"test", b""])
|
||||
mocked_open.return_value.read = Mock(side_effect=[backup_data.encode(), b""])
|
||||
fetch_backup.return_value = test_backup
|
||||
resp = await client.post(
|
||||
"/api/backup/upload?agent_id=cloud.cloud",
|
||||
data={"file": StringIO("test")},
|
||||
data={"file": StringIO(backup_data)},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert logmsg in caplog.text
|
||||
assert resp.status == 201
|
||||
assert cloud.files.upload.call_count == 1
|
||||
store_backups = hass_storage[BACKUP_DOMAIN]["data"]["backups"]
|
||||
assert len(store_backups) == 1
|
||||
stored_backup = store_backups[0]
|
||||
@@ -505,6 +465,7 @@ async def test_agents_upload_not_protected(
|
||||
) -> None:
|
||||
"""Test agent upload backup, when cloud user is logged in."""
|
||||
client = await hass_client()
|
||||
backup_data = "test"
|
||||
backup_id = "test-backup"
|
||||
test_backup = AgentBackup(
|
||||
addons=[AddonInfo(name="Test", slug="test", version="1.0.0")],
|
||||
@@ -517,7 +478,7 @@ async def test_agents_upload_not_protected(
|
||||
homeassistant_version="2024.12.0",
|
||||
name="Test",
|
||||
protected=False,
|
||||
size=0,
|
||||
size=len(backup_data),
|
||||
)
|
||||
with (
|
||||
patch("pathlib.Path.open"),
|
||||
@@ -528,7 +489,7 @@ async def test_agents_upload_not_protected(
|
||||
):
|
||||
resp = await client.post(
|
||||
"/api/backup/upload?agent_id=cloud.cloud",
|
||||
data={"file": StringIO("test")},
|
||||
data={"file": StringIO(backup_data)},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -540,6 +501,53 @@ async def test_agents_upload_not_protected(
|
||||
assert stored_backup["failed_agent_ids"] == ["cloud.cloud"]
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
|
||||
async def test_agents_upload_wrong_size(
|
||||
hass: HomeAssistant,
|
||||
hass_client: ClientSessionGenerator,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
cloud: Mock,
|
||||
) -> None:
|
||||
"""Test agent upload backup with the wrong size."""
|
||||
client = await hass_client()
|
||||
backup_data = "test"
|
||||
backup_id = "test-backup"
|
||||
test_backup = AgentBackup(
|
||||
addons=[AddonInfo(name="Test", slug="test", version="1.0.0")],
|
||||
backup_id=backup_id,
|
||||
database_included=True,
|
||||
date="1970-01-01T00:00:00.000Z",
|
||||
extra_metadata={},
|
||||
folders=[Folder.MEDIA, Folder.SHARE],
|
||||
homeassistant_included=True,
|
||||
homeassistant_version="2024.12.0",
|
||||
name="Test",
|
||||
protected=True,
|
||||
size=len(backup_data) - 1,
|
||||
)
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
|
||||
) as fetch_backup,
|
||||
patch(
|
||||
"homeassistant.components.backup.manager.read_backup",
|
||||
return_value=test_backup,
|
||||
),
|
||||
patch("pathlib.Path.open") as mocked_open,
|
||||
):
|
||||
mocked_open.return_value.read = Mock(side_effect=[backup_data.encode(), b""])
|
||||
fetch_backup.return_value = test_backup
|
||||
resp = await client.post(
|
||||
"/api/backup/upload?agent_id=cloud.cloud",
|
||||
data={"file": StringIO(backup_data)},
|
||||
)
|
||||
|
||||
assert len(cloud.files.upload.mock_calls) == 0
|
||||
|
||||
assert resp.status == 201
|
||||
assert "Upload failed for cloud.cloud" in caplog.text
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files")
|
||||
async def test_agents_delete(
|
||||
hass: HomeAssistant,
|
||||
|
@@ -1 +1,13 @@
|
||||
"""Tests for the Electric Kiwi integration."""
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
async def init_integration(hass: HomeAssistant, entry: MockConfigEntry) -> None:
|
||||
"""Fixture for setting up the integration with args."""
|
||||
entry.add_to_hass(hass)
|
||||
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
@@ -2,11 +2,18 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable, Generator
|
||||
from collections.abc import Generator
|
||||
from time import time
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from electrickiwi_api.model import AccountBalance, Hop, HopIntervals
|
||||
from electrickiwi_api.model import (
|
||||
AccountSummary,
|
||||
CustomerConnection,
|
||||
Hop,
|
||||
HopIntervals,
|
||||
Service,
|
||||
Session,
|
||||
)
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.application_credentials import (
|
||||
@@ -23,37 +30,55 @@ CLIENT_ID = "1234"
|
||||
CLIENT_SECRET = "5678"
|
||||
REDIRECT_URI = "https://example.com/auth/external/callback"
|
||||
|
||||
type YieldFixture = Generator[AsyncMock]
|
||||
type ComponentSetup = Callable[[], Awaitable[bool]]
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
async def setup_credentials(hass: HomeAssistant) -> None:
|
||||
"""Fixture to setup application credentials component."""
|
||||
await async_setup_component(hass, "application_credentials", {})
|
||||
await async_import_client_credential(
|
||||
hass,
|
||||
DOMAIN,
|
||||
ClientCredential(CLIENT_ID, CLIENT_SECRET),
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
async def request_setup(current_request_with_host: None) -> None:
|
||||
"""Request setup."""
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def component_setup(
|
||||
hass: HomeAssistant, config_entry: MockConfigEntry
|
||||
) -> ComponentSetup:
|
||||
"""Fixture for setting up the integration."""
|
||||
|
||||
async def _setup_func() -> bool:
|
||||
assert await async_setup_component(hass, "application_credentials", {})
|
||||
await hass.async_block_till_done()
|
||||
await async_import_client_credential(
|
||||
hass,
|
||||
DOMAIN,
|
||||
ClientCredential(CLIENT_ID, CLIENT_SECRET),
|
||||
DOMAIN,
|
||||
def electrickiwi_api() -> Generator[AsyncMock]:
|
||||
"""Mock ek api and return values."""
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.electric_kiwi.ElectricKiwiApi",
|
||||
autospec=True,
|
||||
) as mock_client,
|
||||
patch(
|
||||
"homeassistant.components.electric_kiwi.config_flow.ElectricKiwiApi",
|
||||
new=mock_client,
|
||||
),
|
||||
):
|
||||
client = mock_client.return_value
|
||||
client.customer_number = 123456
|
||||
client.electricity = Service(
|
||||
identifier="00000000DDA",
|
||||
service="electricity",
|
||||
service_status="Y",
|
||||
is_primary_service=True,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
config_entry.add_to_hass(hass)
|
||||
result = await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
return result
|
||||
|
||||
return _setup_func
|
||||
client.get_active_session.return_value = Session.from_dict(
|
||||
load_json_value_fixture("session.json", DOMAIN)
|
||||
)
|
||||
client.get_hop_intervals.return_value = HopIntervals.from_dict(
|
||||
load_json_value_fixture("hop_intervals.json", DOMAIN)
|
||||
)
|
||||
client.get_hop.return_value = Hop.from_dict(
|
||||
load_json_value_fixture("get_hop.json", DOMAIN)
|
||||
)
|
||||
client.get_account_summary.return_value = AccountSummary.from_dict(
|
||||
load_json_value_fixture("account_summary.json", DOMAIN)
|
||||
)
|
||||
client.get_connection_details.return_value = CustomerConnection.from_dict(
|
||||
load_json_value_fixture("connection_details.json", DOMAIN)
|
||||
)
|
||||
yield client
|
||||
|
||||
|
||||
@pytest.fixture(name="config_entry")
|
||||
@@ -63,7 +88,7 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
|
||||
title="Electric Kiwi",
|
||||
domain=DOMAIN,
|
||||
data={
|
||||
"id": "12345",
|
||||
"id": "123456",
|
||||
"auth_implementation": DOMAIN,
|
||||
"token": {
|
||||
"refresh_token": "mock-refresh-token",
|
||||
@@ -74,6 +99,54 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
|
||||
},
|
||||
},
|
||||
unique_id=DOMAIN,
|
||||
version=1,
|
||||
minor_version=1,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(name="config_entry2")
|
||||
def mock_config_entry2(hass: HomeAssistant) -> MockConfigEntry:
|
||||
"""Create mocked config entry."""
|
||||
return MockConfigEntry(
|
||||
title="Electric Kiwi",
|
||||
domain=DOMAIN,
|
||||
data={
|
||||
"id": "123457",
|
||||
"auth_implementation": DOMAIN,
|
||||
"token": {
|
||||
"refresh_token": "mock-refresh-token",
|
||||
"access_token": "mock-access-token",
|
||||
"type": "Bearer",
|
||||
"expires_in": 60,
|
||||
"expires_at": time() + 60,
|
||||
},
|
||||
},
|
||||
unique_id="1234567",
|
||||
version=1,
|
||||
minor_version=1,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(name="migrated_config_entry")
|
||||
def mock_migrated_config_entry(hass: HomeAssistant) -> MockConfigEntry:
|
||||
"""Create mocked config entry."""
|
||||
return MockConfigEntry(
|
||||
title="Electric Kiwi",
|
||||
domain=DOMAIN,
|
||||
data={
|
||||
"id": "123456",
|
||||
"auth_implementation": DOMAIN,
|
||||
"token": {
|
||||
"refresh_token": "mock-refresh-token",
|
||||
"access_token": "mock-access-token",
|
||||
"type": "Bearer",
|
||||
"expires_in": 60,
|
||||
"expires_at": time() + 60,
|
||||
},
|
||||
},
|
||||
unique_id="123456",
|
||||
version=1,
|
||||
minor_version=2,
|
||||
)
|
||||
|
||||
|
||||
@@ -87,35 +160,10 @@ def mock_setup_entry() -> Generator[AsyncMock]:
|
||||
|
||||
|
||||
@pytest.fixture(name="ek_auth")
|
||||
def electric_kiwi_auth() -> YieldFixture:
|
||||
def electric_kiwi_auth() -> Generator[AsyncMock]:
|
||||
"""Patch access to electric kiwi access token."""
|
||||
with patch(
|
||||
"homeassistant.components.electric_kiwi.api.AsyncConfigEntryAuth"
|
||||
"homeassistant.components.electric_kiwi.api.ConfigEntryElectricKiwiAuth"
|
||||
) as mock_auth:
|
||||
mock_auth.return_value.async_get_access_token = AsyncMock("auth_token")
|
||||
yield mock_auth
|
||||
|
||||
|
||||
@pytest.fixture(name="ek_api")
|
||||
def ek_api() -> YieldFixture:
|
||||
"""Mock ek api and return values."""
|
||||
with patch(
|
||||
"homeassistant.components.electric_kiwi.ElectricKiwiApi", autospec=True
|
||||
) as mock_ek_api:
|
||||
mock_ek_api.return_value.customer_number = 123456
|
||||
mock_ek_api.return_value.connection_id = 123456
|
||||
mock_ek_api.return_value.set_active_session.return_value = None
|
||||
mock_ek_api.return_value.get_hop_intervals.return_value = (
|
||||
HopIntervals.from_dict(
|
||||
load_json_value_fixture("hop_intervals.json", DOMAIN)
|
||||
)
|
||||
)
|
||||
mock_ek_api.return_value.get_hop.return_value = Hop.from_dict(
|
||||
load_json_value_fixture("get_hop.json", DOMAIN)
|
||||
)
|
||||
mock_ek_api.return_value.get_account_balance.return_value = (
|
||||
AccountBalance.from_dict(
|
||||
load_json_value_fixture("account_balance.json", DOMAIN)
|
||||
)
|
||||
)
|
||||
yield mock_ek_api
|
||||
|
@@ -1,28 +0,0 @@
|
||||
{
|
||||
"data": {
|
||||
"connections": [
|
||||
{
|
||||
"hop_percentage": "3.5",
|
||||
"id": 3,
|
||||
"running_balance": "184.09",
|
||||
"start_date": "2020-10-04",
|
||||
"unbilled_days": 15
|
||||
}
|
||||
],
|
||||
"last_billed_amount": "-66.31",
|
||||
"last_billed_date": "2020-10-03",
|
||||
"next_billing_date": "2020-11-03",
|
||||
"is_prepay": "N",
|
||||
"summary": {
|
||||
"credits": "0.0",
|
||||
"electricity_used": "184.09",
|
||||
"other_charges": "0.00",
|
||||
"payments": "-220.0"
|
||||
},
|
||||
"total_account_balance": "-102.22",
|
||||
"total_billing_days": 30,
|
||||
"total_running_balance": "184.09",
|
||||
"type": "account_running_balance"
|
||||
},
|
||||
"status": 1
|
||||
}
|
43
tests/components/electric_kiwi/fixtures/account_summary.json
Normal file
43
tests/components/electric_kiwi/fixtures/account_summary.json
Normal file
@@ -0,0 +1,43 @@
|
||||
{
|
||||
"data": {
|
||||
"type": "account_summary",
|
||||
"total_running_balance": "184.09",
|
||||
"total_account_balance": "-102.22",
|
||||
"total_billing_days": 31,
|
||||
"next_billing_date": "2025-02-19",
|
||||
"service_names": ["power"],
|
||||
"services": {
|
||||
"power": {
|
||||
"connections": [
|
||||
{
|
||||
"id": 515363,
|
||||
"running_balance": "12.98",
|
||||
"unbilled_days": 5,
|
||||
"hop_percentage": "11.2",
|
||||
"start_date": "2025-01-19",
|
||||
"service_label": "Power"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"date_to_pay": "",
|
||||
"invoice_id": "",
|
||||
"total_invoiced_charges": "",
|
||||
"default_to_pay": "",
|
||||
"invoice_exists": 1,
|
||||
"display_date": "2025-01-19",
|
||||
"last_billed_date": "2025-01-18",
|
||||
"last_billed_amount": "-21.02",
|
||||
"summary": {
|
||||
"electricity_used": "12.98",
|
||||
"other_charges": "0.00",
|
||||
"payments": "0.00",
|
||||
"credits": "0.00",
|
||||
"mobile_charges": "0.00",
|
||||
"broadband_charges": "0.00",
|
||||
"addon_unbilled_charges": {}
|
||||
},
|
||||
"is_prepay": "N"
|
||||
},
|
||||
"status": 1
|
||||
}
|
@@ -0,0 +1,73 @@
|
||||
{
|
||||
"data": {
|
||||
"type": "connection",
|
||||
"id": 515363,
|
||||
"customer_id": 273941,
|
||||
"customer_number": 34030646,
|
||||
"icp_identifier": "00000000DDA",
|
||||
"address": "",
|
||||
"short_address": "",
|
||||
"physical_address_unit": "",
|
||||
"physical_address_number": "555",
|
||||
"physical_address_street": "RACECOURSE ROAD",
|
||||
"physical_address_suburb": "",
|
||||
"physical_address_town": "Blah",
|
||||
"physical_address_region": "Blah",
|
||||
"physical_address_postcode": "0000",
|
||||
"is_active": "Y",
|
||||
"pricing_plan": {
|
||||
"id": 51423,
|
||||
"usage": "0.0000",
|
||||
"fixed": "0.6000",
|
||||
"usage_rate_inc_gst": "0.0000",
|
||||
"supply_rate_inc_gst": "0.6900",
|
||||
"plan_description": "MoveMaster Anytime Residential (Low User)",
|
||||
"plan_type": "movemaster_tou",
|
||||
"signup_price_plan_blurb": "Better rates every day during off-peak, and all day on weekends. Plus half price nights (11pm-7am) and our best solar buyback.",
|
||||
"signup_price_plan_label": "MoveMaster",
|
||||
"app_price_plan_label": "Your MoveMaster rates are...",
|
||||
"solar_rate_excl_gst": "0.1250",
|
||||
"solar_rate_incl_gst": "0.1438",
|
||||
"pricing_type": "tou_plus",
|
||||
"tou_plus": {
|
||||
"fixed_rate_excl_gst": "0.6000",
|
||||
"fixed_rate_incl_gst": "0.6900",
|
||||
"interval_types": ["peak", "off_peak_shoulder", "off_peak_night"],
|
||||
"peak": {
|
||||
"price_excl_gst": "0.5390",
|
||||
"price_incl_gst": "0.6199",
|
||||
"display_text": {
|
||||
"Weekdays": "7am-9am, 5pm-9pm"
|
||||
},
|
||||
"tou_plus_label": "Peak"
|
||||
},
|
||||
"off_peak_shoulder": {
|
||||
"price_excl_gst": "0.3234",
|
||||
"price_incl_gst": "0.3719",
|
||||
"display_text": {
|
||||
"Weekdays": "9am-5pm, 9pm-11pm",
|
||||
"Weekends": "7am-11pm"
|
||||
},
|
||||
"tou_plus_label": "Off-peak shoulder"
|
||||
},
|
||||
"off_peak_night": {
|
||||
"price_excl_gst": "0.2695",
|
||||
"price_incl_gst": "0.3099",
|
||||
"display_text": {
|
||||
"Every day": "11pm-7am"
|
||||
},
|
||||
"tou_plus_label": "Off-peak night"
|
||||
}
|
||||
}
|
||||
},
|
||||
"hop": {
|
||||
"start_time": "9:00 PM",
|
||||
"end_time": "10:00 PM",
|
||||
"interval_start": "43",
|
||||
"interval_end": "44"
|
||||
},
|
||||
"start_date": "2022-03-03",
|
||||
"end_date": "",
|
||||
"property_type": "residential"
|
||||
}
|
||||
}
|
@@ -1,16 +1,18 @@
|
||||
{
|
||||
"data": {
|
||||
"connection_id": "3",
|
||||
"customer_number": 1000001,
|
||||
"end": {
|
||||
"end_time": "5:00 PM",
|
||||
"interval": "34"
|
||||
},
|
||||
"type": "hop_customer",
|
||||
"customer_id": 123456,
|
||||
"service_type": "electricity",
|
||||
"connection_id": 515363,
|
||||
"billing_id": 1247975,
|
||||
"start": {
|
||||
"start_time": "4:00 PM",
|
||||
"interval": "33"
|
||||
"interval": "33",
|
||||
"start_time": "4:00 PM"
|
||||
},
|
||||
"type": "hop_customer"
|
||||
"end": {
|
||||
"interval": "34",
|
||||
"end_time": "5:00 PM"
|
||||
}
|
||||
},
|
||||
"status": 1
|
||||
}
|
||||
|
@@ -1,249 +1,250 @@
|
||||
{
|
||||
"data": {
|
||||
"hop_duration": "60",
|
||||
"type": "hop_intervals",
|
||||
"hop_duration": "60",
|
||||
"intervals": {
|
||||
"1": {
|
||||
"active": 1,
|
||||
"start_time": "12:00 AM",
|
||||
"end_time": "1:00 AM",
|
||||
"start_time": "12:00 AM"
|
||||
"active": 1
|
||||
},
|
||||
"2": {
|
||||
"active": 1,
|
||||
"start_time": "12:30 AM",
|
||||
"end_time": "1:30 AM",
|
||||
"start_time": "12:30 AM"
|
||||
"active": 1
|
||||
},
|
||||
"3": {
|
||||
"active": 1,
|
||||
"start_time": "1:00 AM",
|
||||
"end_time": "2:00 AM",
|
||||
"start_time": "1:00 AM"
|
||||
"active": 1
|
||||
},
|
||||
"4": {
|
||||
"active": 1,
|
||||
"start_time": "1:30 AM",
|
||||
"end_time": "2:30 AM",
|
||||
"start_time": "1:30 AM"
|
||||
"active": 1
|
||||
},
|
||||
"5": {
|
||||
"active": 1,
|
||||
"start_time": "2:00 AM",
|
||||
"end_time": "3:00 AM",
|
||||
"start_time": "2:00 AM"
|
||||
"active": 1
|
||||
},
|
||||
"6": {
|
||||
"active": 1,
|
||||
"start_time": "2:30 AM",
|
||||
"end_time": "3:30 AM",
|
||||
"start_time": "2:30 AM"
|
||||
"active": 1
|
||||
},
|
||||
"7": {
|
||||
"active": 1,
|
||||
"start_time": "3:00 AM",
|
||||
"end_time": "4:00 AM",
|
||||
"start_time": "3:00 AM"
|
||||
"active": 1
|
||||
},
|
||||
"8": {
|
||||
"active": 1,
|
||||
"start_time": "3:30 AM",
|
||||
"end_time": "4:30 AM",
|
||||
"start_time": "3:30 AM"
|
||||
"active": 1
|
||||
},
|
||||
"9": {
|
||||
"active": 1,
|
||||
"start_time": "4:00 AM",
|
||||
"end_time": "5:00 AM",
|
||||
"start_time": "4:00 AM"
|
||||
"active": 1
|
||||
},
|
||||
"10": {
|
||||
"active": 1,
|
||||
"start_time": "4:30 AM",
|
||||
"end_time": "5:30 AM",
|
||||
"start_time": "4:30 AM"
|
||||
"active": 1
|
||||
},
|
||||
"11": {
|
||||
"active": 1,
|
||||
"start_time": "5:00 AM",
|
||||
"end_time": "6:00 AM",
|
||||
"start_time": "5:00 AM"
|
||||
"active": 1
|
||||
},
|
||||
"12": {
|
||||
"active": 1,
|
||||
"start_time": "5:30 AM",
|
||||
"end_time": "6:30 AM",
|
||||
"start_time": "5:30 AM"
|
||||
"active": 1
|
||||
},
|
||||
"13": {
|
||||
"active": 1,
|
||||
"start_time": "6:00 AM",
|
||||
"end_time": "7:00 AM",
|
||||
"start_time": "6:00 AM"
|
||||
"active": 1
|
||||
},
|
||||
"14": {
|
||||
"active": 1,
|
||||
"start_time": "6:30 AM",
|
||||
"end_time": "7:30 AM",
|
||||
"start_time": "6:30 AM"
|
||||
"active": 0
|
||||
},
|
||||
"15": {
|
||||
"active": 1,
|
||||
"start_time": "7:00 AM",
|
||||
"end_time": "8:00 AM",
|
||||
"start_time": "7:00 AM"
|
||||
"active": 0
|
||||
},
|
||||
"16": {
|
||||
"active": 1,
|
||||
"start_time": "7:30 AM",
|
||||
"end_time": "8:30 AM",
|
||||
"start_time": "7:30 AM"
|
||||
"active": 0
|
||||
},
|
||||
"17": {
|
||||
"active": 1,
|
||||
"start_time": "8:00 AM",
|
||||
"end_time": "9:00 AM",
|
||||
"start_time": "8:00 AM"
|
||||
"active": 0
|
||||
},
|
||||
"18": {
|
||||
"active": 1,
|
||||
"start_time": "8:30 AM",
|
||||
"end_time": "9:30 AM",
|
||||
"start_time": "8:30 AM"
|
||||
"active": 0
|
||||
},
|
||||
"19": {
|
||||
"active": 1,
|
||||
"start_time": "9:00 AM",
|
||||
"end_time": "10:00 AM",
|
||||
"start_time": "9:00 AM"
|
||||
"active": 1
|
||||
},
|
||||
"20": {
|
||||
"active": 1,
|
||||
"start_time": "9:30 AM",
|
||||
"end_time": "10:30 AM",
|
||||
"start_time": "9:30 AM"
|
||||
"active": 1
|
||||
},
|
||||
"21": {
|
||||
"active": 1,
|
||||
"start_time": "10:00 AM",
|
||||
"end_time": "11:00 AM",
|
||||
"start_time": "10:00 AM"
|
||||
"active": 1
|
||||
},
|
||||
"22": {
|
||||
"active": 1,
|
||||
"start_time": "10:30 AM",
|
||||
"end_time": "11:30 AM",
|
||||
"start_time": "10:30 AM"
|
||||
"active": 1
|
||||
},
|
||||
"23": {
|
||||
"active": 1,
|
||||
"start_time": "11:00 AM",
|
||||
"end_time": "12:00 PM",
|
||||
"start_time": "11:00 AM"
|
||||
"active": 1
|
||||
},
|
||||
"24": {
|
||||
"active": 1,
|
||||
"start_time": "11:30 AM",
|
||||
"end_time": "12:30 PM",
|
||||
"start_time": "11:30 AM"
|
||||
"active": 1
|
||||
},
|
||||
"25": {
|
||||
"active": 1,
|
||||
"start_time": "12:00 PM",
|
||||
"end_time": "1:00 PM",
|
||||
"start_time": "12:00 PM"
|
||||
"active": 1
|
||||
},
|
||||
"26": {
|
||||
"active": 1,
|
||||
"start_time": "12:30 PM",
|
||||
"end_time": "1:30 PM",
|
||||
"start_time": "12:30 PM"
|
||||
"active": 1
|
||||
},
|
||||
"27": {
|
||||
"active": 1,
|
||||
"start_time": "1:00 PM",
|
||||
"end_time": "2:00 PM",
|
||||
"start_time": "1:00 PM"
|
||||
"active": 1
|
||||
},
|
||||
"28": {
|
||||
"active": 1,
|
||||
"start_time": "1:30 PM",
|
||||
"end_time": "2:30 PM",
|
||||
"start_time": "1:30 PM"
|
||||
"active": 1
|
||||
},
|
||||
"29": {
|
||||
"active": 1,
|
||||
"start_time": "2:00 PM",
|
||||
"end_time": "3:00 PM",
|
||||
"start_time": "2:00 PM"
|
||||
"active": 1
|
||||
},
|
||||
"30": {
|
||||
"active": 1,
|
||||
"start_time": "2:30 PM",
|
||||
"end_time": "3:30 PM",
|
||||
"start_time": "2:30 PM"
|
||||
"active": 1
|
||||
},
|
||||
"31": {
|
||||
"active": 1,
|
||||
"start_time": "3:00 PM",
|
||||
"end_time": "4:00 PM",
|
||||
"start_time": "3:00 PM"
|
||||
"active": 1
|
||||
},
|
||||
"32": {
|
||||
"active": 1,
|
||||
"start_time": "3:30 PM",
|
||||
"end_time": "4:30 PM",
|
||||
"start_time": "3:30 PM"
|
||||
"active": 1
|
||||
},
|
||||
"33": {
|
||||
"active": 1,
|
||||
"start_time": "4:00 PM",
|
||||
"end_time": "5:00 PM",
|
||||
"start_time": "4:00 PM"
|
||||
"active": 1
|
||||
},
|
||||
"34": {
|
||||
"active": 1,
|
||||
"start_time": "4:30 PM",
|
||||
"end_time": "5:30 PM",
|
||||
"start_time": "4:30 PM"
|
||||
"active": 0
|
||||
},
|
||||
"35": {
|
||||
"active": 1,
|
||||
"start_time": "5:00 PM",
|
||||
"end_time": "6:00 PM",
|
||||
"start_time": "5:00 PM"
|
||||
"active": 0
|
||||
},
|
||||
"36": {
|
||||
"active": 1,
|
||||
"start_time": "5:30 PM",
|
||||
"end_time": "6:30 PM",
|
||||
"start_time": "5:30 PM"
|
||||
"active": 0
|
||||
},
|
||||
"37": {
|
||||
"active": 1,
|
||||
"start_time": "6:00 PM",
|
||||
"end_time": "7:00 PM",
|
||||
"start_time": "6:00 PM"
|
||||
"active": 0
|
||||
},
|
||||
"38": {
|
||||
"active": 1,
|
||||
"start_time": "6:30 PM",
|
||||
"end_time": "7:30 PM",
|
||||
"start_time": "6:30 PM"
|
||||
"active": 0
|
||||
},
|
||||
"39": {
|
||||
"active": 1,
|
||||
"start_time": "7:00 PM",
|
||||
"end_time": "8:00 PM",
|
||||
"start_time": "7:00 PM"
|
||||
"active": 0
|
||||
},
|
||||
"40": {
|
||||
"active": 1,
|
||||
"start_time": "7:30 PM",
|
||||
"end_time": "8:30 PM",
|
||||
"start_time": "7:30 PM"
|
||||
"active": 0
|
||||
},
|
||||
"41": {
|
||||
"active": 1,
|
||||
"start_time": "8:00 PM",
|
||||
"end_time": "9:00 PM",
|
||||
"start_time": "8:00 PM"
|
||||
"active": 0
|
||||
},
|
||||
"42": {
|
||||
"active": 1,
|
||||
"start_time": "8:30 PM",
|
||||
"end_time": "9:30 PM",
|
||||
"start_time": "8:30 PM"
|
||||
"active": 0
|
||||
},
|
||||
"43": {
|
||||
"active": 1,
|
||||
"start_time": "9:00 PM",
|
||||
"end_time": "10:00 PM",
|
||||
"start_time": "9:00 PM"
|
||||
"active": 1
|
||||
},
|
||||
"44": {
|
||||
"active": 1,
|
||||
"start_time": "9:30 PM",
|
||||
"end_time": "10:30 PM",
|
||||
"start_time": "9:30 PM"
|
||||
"active": 1
|
||||
},
|
||||
"45": {
|
||||
"active": 1,
|
||||
"end_time": "11:00 AM",
|
||||
"start_time": "10:00 PM"
|
||||
"start_time": "10:00 PM",
|
||||
"end_time": "11:00 PM",
|
||||
"active": 1
|
||||
},
|
||||
"46": {
|
||||
"active": 1,
|
||||
"start_time": "10:30 PM",
|
||||
"end_time": "11:30 PM",
|
||||
"start_time": "10:30 PM"
|
||||
"active": 1
|
||||
},
|
||||
"47": {
|
||||
"active": 1,
|
||||
"start_time": "11:00 PM",
|
||||
"end_time": "12:00 AM",
|
||||
"start_time": "11:00 PM"
|
||||
"active": 1
|
||||
},
|
||||
"48": {
|
||||
"active": 1,
|
||||
"start_time": "11:30 PM",
|
||||
"end_time": "12:30 AM",
|
||||
"start_time": "11:30 PM"
|
||||
"active": 0
|
||||
}
|
||||
}
|
||||
},
|
||||
"service_type": "electricity"
|
||||
},
|
||||
"status": 1
|
||||
}
|
||||
|
23
tests/components/electric_kiwi/fixtures/session.json
Normal file
23
tests/components/electric_kiwi/fixtures/session.json
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"data": {
|
||||
"data": {
|
||||
"type": "session",
|
||||
"avatar": [],
|
||||
"customer_number": 123456,
|
||||
"customer_name": "Joe Dirt",
|
||||
"email": "joe@dirt.kiwi",
|
||||
"customer_status": "Y",
|
||||
"services": [
|
||||
{
|
||||
"service": "Electricity",
|
||||
"identifier": "00000000DDA",
|
||||
"is_primary_service": true,
|
||||
"service_status": "Y"
|
||||
}
|
||||
],
|
||||
"res_partner_id": 285554,
|
||||
"nuid": "EK_GUID"
|
||||
}
|
||||
},
|
||||
"status": 1
|
||||
}
|
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"data": {
|
||||
"data": {
|
||||
"type": "session",
|
||||
"avatar": [],
|
||||
"customer_number": 123456,
|
||||
"customer_name": "Joe Dirt",
|
||||
"email": "joe@dirt.kiwi",
|
||||
"customer_status": "Y",
|
||||
"services": [],
|
||||
"res_partner_id": 285554,
|
||||
"nuid": "EK_GUID"
|
||||
}
|
||||
},
|
||||
"status": 1
|
||||
}
|
@@ -3,70 +3,40 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from http import HTTPStatus
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
from electrickiwi_api.exceptions import ApiException
|
||||
import pytest
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components.application_credentials import (
|
||||
ClientCredential,
|
||||
async_import_client_credential,
|
||||
)
|
||||
from homeassistant.components.electric_kiwi.const import (
|
||||
DOMAIN,
|
||||
OAUTH2_AUTHORIZE,
|
||||
OAUTH2_TOKEN,
|
||||
SCOPE_VALUES,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_USER
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from .conftest import CLIENT_ID, CLIENT_SECRET, REDIRECT_URI
|
||||
from .conftest import CLIENT_ID, REDIRECT_URI
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
from tests.typing import ClientSessionGenerator
|
||||
|
||||
pytestmark = pytest.mark.usefixtures("mock_setup_entry")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def setup_credentials(hass: HomeAssistant) -> None:
|
||||
"""Fixture to setup application credentials component."""
|
||||
await async_setup_component(hass, "application_credentials", {})
|
||||
await async_import_client_credential(
|
||||
hass,
|
||||
DOMAIN,
|
||||
ClientCredential(CLIENT_ID, CLIENT_SECRET),
|
||||
)
|
||||
|
||||
|
||||
async def test_config_flow_no_credentials(hass: HomeAssistant) -> None:
|
||||
"""Test config flow base case with no credentials registered."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER}
|
||||
)
|
||||
assert result.get("type") is FlowResultType.ABORT
|
||||
assert result.get("reason") == "missing_credentials"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("current_request_with_host")
|
||||
@pytest.mark.usefixtures("current_request_with_host", "electrickiwi_api")
|
||||
async def test_full_flow(
|
||||
hass: HomeAssistant,
|
||||
hass_client_no_auth: ClientSessionGenerator,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
setup_credentials: None,
|
||||
mock_setup_entry: AsyncMock,
|
||||
) -> None:
|
||||
"""Check full flow."""
|
||||
await async_import_client_credential(
|
||||
hass, DOMAIN, ClientCredential(CLIENT_ID, CLIENT_SECRET)
|
||||
)
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER, "entry_id": DOMAIN}
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
state = config_entry_oauth2_flow._encode_jwt(
|
||||
hass,
|
||||
@@ -76,13 +46,13 @@ async def test_full_flow(
|
||||
},
|
||||
)
|
||||
|
||||
URL_SCOPE = SCOPE_VALUES.replace(" ", "+")
|
||||
url_scope = SCOPE_VALUES.replace(" ", "+")
|
||||
|
||||
assert result["url"] == (
|
||||
f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
|
||||
f"&redirect_uri={REDIRECT_URI}"
|
||||
f"&state={state}"
|
||||
f"&scope={URL_SCOPE}"
|
||||
f"&scope={url_scope}"
|
||||
)
|
||||
|
||||
client = await hass_client_no_auth()
|
||||
@@ -90,6 +60,7 @@ async def test_full_flow(
|
||||
assert resp.status == HTTPStatus.OK
|
||||
assert resp.headers["content-type"] == "text/html; charset=utf-8"
|
||||
|
||||
aioclient_mock.clear_requests()
|
||||
aioclient_mock.post(
|
||||
OAUTH2_TOKEN,
|
||||
json={
|
||||
@@ -106,20 +77,73 @@ async def test_full_flow(
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("current_request_with_host")
|
||||
async def test_flow_failure(
|
||||
hass: HomeAssistant,
|
||||
hass_client_no_auth: ClientSessionGenerator,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
electrickiwi_api: AsyncMock,
|
||||
) -> None:
|
||||
"""Check failure on creation of entry."""
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
state = config_entry_oauth2_flow._encode_jwt(
|
||||
hass,
|
||||
{
|
||||
"flow_id": result["flow_id"],
|
||||
"redirect_uri": REDIRECT_URI,
|
||||
},
|
||||
)
|
||||
|
||||
url_scope = SCOPE_VALUES.replace(" ", "+")
|
||||
|
||||
assert result["url"] == (
|
||||
f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
|
||||
f"&redirect_uri={REDIRECT_URI}"
|
||||
f"&state={state}"
|
||||
f"&scope={url_scope}"
|
||||
)
|
||||
|
||||
client = await hass_client_no_auth()
|
||||
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
|
||||
assert resp.status == HTTPStatus.OK
|
||||
assert resp.headers["content-type"] == "text/html; charset=utf-8"
|
||||
|
||||
aioclient_mock.clear_requests()
|
||||
aioclient_mock.post(
|
||||
OAUTH2_TOKEN,
|
||||
json={
|
||||
"refresh_token": "mock-refresh-token",
|
||||
"access_token": "mock-access-token",
|
||||
"type": "Bearer",
|
||||
"expires_in": 60,
|
||||
},
|
||||
)
|
||||
|
||||
electrickiwi_api.get_active_session.side_effect = ApiException()
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
|
||||
assert len(hass.config_entries.async_entries(DOMAIN)) == 0
|
||||
assert result.get("type") is FlowResultType.ABORT
|
||||
assert result.get("reason") == "connection_error"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("current_request_with_host")
|
||||
async def test_existing_entry(
|
||||
hass: HomeAssistant,
|
||||
hass_client_no_auth: ClientSessionGenerator,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
setup_credentials: None,
|
||||
config_entry: MockConfigEntry,
|
||||
migrated_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Check existing entry."""
|
||||
config_entry.add_to_hass(hass)
|
||||
migrated_config_entry.add_to_hass(hass)
|
||||
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": config_entries.SOURCE_USER, "entry_id": DOMAIN}
|
||||
DOMAIN, context={"source": SOURCE_USER, "entry_id": DOMAIN}
|
||||
)
|
||||
|
||||
state = config_entry_oauth2_flow._encode_jwt(
|
||||
@@ -145,7 +169,9 @@ async def test_existing_entry(
|
||||
},
|
||||
)
|
||||
|
||||
await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
assert result.get("type") is FlowResultType.ABORT
|
||||
assert result.get("reason") == "already_configured"
|
||||
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
|
||||
|
||||
|
||||
@@ -154,13 +180,13 @@ async def test_reauthentication(
|
||||
hass: HomeAssistant,
|
||||
hass_client_no_auth: ClientSessionGenerator,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
mock_setup_entry: MagicMock,
|
||||
config_entry: MockConfigEntry,
|
||||
setup_credentials: None,
|
||||
mock_setup_entry: AsyncMock,
|
||||
migrated_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test Electric Kiwi reauthentication."""
|
||||
config_entry.add_to_hass(hass)
|
||||
result = await config_entry.start_reauth_flow(hass)
|
||||
migrated_config_entry.add_to_hass(hass)
|
||||
|
||||
result = await migrated_config_entry.start_reauth_flow(hass)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "reauth_confirm"
|
||||
|
||||
@@ -189,8 +215,11 @@ async def test_reauthentication(
|
||||
},
|
||||
)
|
||||
|
||||
await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
result = await hass.config_entries.flow.async_configure(result["flow_id"])
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
||||
assert result.get("type") is FlowResultType.ABORT
|
||||
assert result.get("reason") == "reauth_successful"
|
||||
|
135
tests/components/electric_kiwi/test_init.py
Normal file
135
tests/components/electric_kiwi/test_init.py
Normal file
@@ -0,0 +1,135 @@
|
||||
"""Test the Electric Kiwi init."""
|
||||
|
||||
import http
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from aiohttp import RequestInfo
|
||||
from aiohttp.client_exceptions import ClientResponseError
|
||||
from electrickiwi_api.exceptions import ApiException, AuthException
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.electric_kiwi.const import DOMAIN
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from . import init_integration
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
async def test_async_setup_entry(
|
||||
hass: HomeAssistant, config_entry: MockConfigEntry
|
||||
) -> None:
|
||||
"""Test a successful setup entry and unload of entry."""
|
||||
await init_integration(hass, config_entry)
|
||||
|
||||
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
|
||||
assert config_entry.state is ConfigEntryState.LOADED
|
||||
|
||||
assert await hass.config_entries.async_unload(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert config_entry.state is ConfigEntryState.NOT_LOADED
|
||||
|
||||
|
||||
async def test_async_setup_multiple_entries(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
config_entry2: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test a successful setup and unload of multiple entries."""
|
||||
|
||||
for entry in (config_entry, config_entry2):
|
||||
await init_integration(hass, entry)
|
||||
|
||||
assert len(hass.config_entries.async_entries(DOMAIN)) == 2
|
||||
|
||||
for entry in (config_entry, config_entry2):
|
||||
assert await hass.config_entries.async_unload(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert entry.state is ConfigEntryState.NOT_LOADED
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("status", "expected_state"),
|
||||
[
|
||||
(
|
||||
http.HTTPStatus.UNAUTHORIZED,
|
||||
ConfigEntryState.SETUP_ERROR,
|
||||
),
|
||||
(
|
||||
http.HTTPStatus.INTERNAL_SERVER_ERROR,
|
||||
ConfigEntryState.SETUP_RETRY,
|
||||
),
|
||||
],
|
||||
ids=["failure_requires_reauth", "transient_failure"],
|
||||
)
|
||||
async def test_refresh_token_validity_failures(
|
||||
hass: HomeAssistant,
|
||||
config_entry: MockConfigEntry,
|
||||
status: http.HTTPStatus,
|
||||
expected_state: ConfigEntryState,
|
||||
) -> None:
|
||||
"""Test token refresh failure status."""
|
||||
with patch(
|
||||
"homeassistant.helpers.config_entry_oauth2_flow.OAuth2Session.async_ensure_token_valid",
|
||||
side_effect=ClientResponseError(
|
||||
RequestInfo("", "POST", {}, ""), None, status=status
|
||||
),
|
||||
) as mock_async_ensure_token_valid:
|
||||
await init_integration(hass, config_entry)
|
||||
mock_async_ensure_token_valid.assert_called_once()
|
||||
|
||||
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
|
||||
|
||||
entries = hass.config_entries.async_entries(DOMAIN)
|
||||
assert entries[0].state is expected_state
|
||||
|
||||
|
||||
async def test_unique_id_migration(
    hass: HomeAssistant,
    config_entry: MockConfigEntry,
    entity_registry: er.EntityRegistry,
) -> None:
    """Test that the unique ID is migrated to the customer number."""
    config_entry.add_to_hass(hass)
    # Pre-create an entity using the old-style unique ID so the migration
    # has something to rewrite.
    entity_registry.async_get_or_create(
        SENSOR_DOMAIN, DOMAIN, "123456_515363_sensor", config_entry=config_entry
    )

    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    # The config entry is bumped to minor version 2 and its unique ID is
    # replaced by the customer number.
    migrated = hass.config_entries.async_get_entry(config_entry.entry_id)
    assert migrated.minor_version == 2
    assert migrated.unique_id == "123456"

    # The registry entity's unique ID is rewritten to the new format as well.
    entity_entry = entity_registry.async_get(
        "sensor.electric_kiwi_123456_515363_sensor"
    )
    assert entity_entry.unique_id == "123456_00000000DDA_sensor"
async def test_unique_id_migration_failure(
    hass: HomeAssistant, config_entry: MockConfigEntry, electrickiwi_api: AsyncMock
) -> None:
    """Test that unique ID migration fails when the API raises an ApiException."""
    # Force the session activation call used by migration to fail.
    electrickiwi_api.set_active_session.side_effect = ApiException()
    await init_integration(hass, config_entry)

    # The entry must be left un-migrated (minor version 1, placeholder
    # unique ID) and end up in the migration error state.
    assert config_entry.minor_version == 1
    assert config_entry.unique_id == DOMAIN
    assert config_entry.state is ConfigEntryState.MIGRATION_ERROR
async def test_unique_id_migration_auth_failure(
    hass: HomeAssistant, config_entry: MockConfigEntry, electrickiwi_api: AsyncMock
) -> None:
    """Test that unique ID migration fails when the API raises an AuthException."""
    # Force the session activation call used by migration to fail with an
    # authentication error.
    electrickiwi_api.set_active_session.side_effect = AuthException()
    await init_integration(hass, config_entry)

    # The entry must be left un-migrated (minor version 1, placeholder
    # unique ID) and end up in the migration error state.
    assert config_entry.minor_version == 1
    assert config_entry.unique_id == DOMAIN
    assert config_entry.state is ConfigEntryState.MIGRATION_ERROR
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user