Compare commits


7 Commits

Author | SHA1 | Message | Date
mib1185 | 0b1ed36b74 | add setup tests | 2025-11-18 21:55:35 +00:00
mib1185 | ef1ab3fc61 | add more service tests | 2025-11-18 21:28:35 +00:00
mib1185 | 2a54d812ba | add service tests | 2025-11-18 18:02:45 +00:00
mib1185 | 93a2504f23 | add sensor test | 2025-11-18 17:51:43 +00:00
mib1185 | e1b03faac0 | dedub code in update tests | 2025-11-18 17:50:17 +00:00
Artur Pragacz | 963e27dda4 | Send snapshot analytics for device database in dev (#155717) | 2025-11-18 16:15:27 +00:00
Yuxin Wang | b8e3d57fea | Deprecate useless sensors in APCUPSD integration (#151525) | 2025-11-18 17:09:38 +01:00
15 changed files with 1722 additions and 97 deletions

View File

@@ -6,9 +6,8 @@ import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import Event, HassJob, HomeAssistant, callback
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import async_call_later, async_track_time_interval
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey
@@ -20,7 +19,7 @@ from .analytics import (
EntityAnalyticsModifications,
async_devices_payload,
)
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, PREFERENCE_SCHEMA
from .http import AnalyticsDevicesView
__all__ = [
@@ -43,28 +42,9 @@ async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
# Load stored data
await analytics.load()
@callback
def start_schedule(_event: Event) -> None:
async def start_schedule(_event: Event) -> None:
"""Start the send schedule after the started event."""
# Wait 15 min after started
async_call_later(
hass,
900,
HassJob(
analytics.send_analytics,
name="analytics schedule",
cancel_on_shutdown=True,
),
)
# Send every day
async_track_time_interval(
hass,
analytics.send_analytics,
INTERVAL,
name="analytics daily",
cancel_on_shutdown=True,
)
await analytics.async_schedule()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)
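The hunk above moves all timer wiring out of async_setup: the started-event listener becomes async and simply hands off to the analytics object's async_schedule(), which now owns (and can cancel) its own callbacks. A minimal sketch of that hand-off pattern, using an illustrative analytics-like object rather than the real class:

from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import Event, HomeAssistant


async def _async_setup_scheduling(hass: HomeAssistant, analytics) -> None:
    """Illustrative only: defer all scheduling until Home Assistant has started."""

    async def start_schedule(_event: Event) -> None:
        # No async_call_later / async_track_time_interval here any more;
        # the analytics object creates and tracks its own timers.
        await analytics.async_schedule()

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)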
@@ -111,7 +91,7 @@ async def websocket_analytics_preferences(
analytics = hass.data[DATA_COMPONENT]
await analytics.save_preferences(preferences)
await analytics.send_analytics()
await analytics.async_schedule()
connection.send_result(
msg["id"],

View File

@@ -7,6 +7,8 @@ from asyncio import timeout
from collections.abc import Awaitable, Callable, Iterable, Mapping
from dataclasses import asdict as dataclass_asdict, dataclass, field
from datetime import datetime
import random
import time
from typing import Any, Protocol
import uuid
@@ -31,10 +33,18 @@ from homeassistant.const import (
BASE_PLATFORMS,
__version__ as HA_VERSION,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import (
CALLBACK_TYPE,
HassJob,
HomeAssistant,
ReleaseChannel,
callback,
get_release_channel,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_call_later, async_track_time_interval
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.singleton import singleton
from homeassistant.helpers.storage import Store
@@ -51,6 +61,7 @@ from homeassistant.setup import async_get_loaded_integrations
from .const import (
ANALYTICS_ENDPOINT_URL,
ANALYTICS_ENDPOINT_URL_DEV,
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
ATTR_ADDON_COUNT,
ATTR_ADDONS,
ATTR_ARCH,
@@ -71,6 +82,7 @@ from .const import (
ATTR_PROTECTED,
ATTR_RECORDER,
ATTR_SLUG,
ATTR_SNAPSHOTS,
ATTR_STATE_COUNT,
ATTR_STATISTICS,
ATTR_SUPERVISOR,
@@ -80,8 +92,10 @@ from .const import (
ATTR_UUID,
ATTR_VERSION,
DOMAIN,
INTERVAL,
LOGGER,
PREFERENCE_SCHEMA,
SNAPSHOT_VERSION,
STORAGE_KEY,
STORAGE_VERSION,
)
@@ -194,13 +208,18 @@ def gen_uuid() -> str:
return uuid.uuid4().hex
RELEASE_CHANNEL = get_release_channel()
@dataclass
class AnalyticsData:
"""Analytics data."""
onboarded: bool
preferences: dict[str, bool]
uuid: str | None
uuid: str | None = None
submission_identifier: str | None = None
snapshot_submission_time: float | None = None
@classmethod
def from_dict(cls, data: dict[str, Any]) -> AnalyticsData:
@@ -209,6 +228,8 @@ class AnalyticsData:
data["onboarded"],
data["preferences"],
data["uuid"],
data.get("submission_identifier"),
data.get("snapshot_submission_time"),
)
@@ -219,8 +240,10 @@ class Analytics:
"""Initialize the Analytics class."""
self.hass: HomeAssistant = hass
self.session = async_get_clientsession(hass)
self._data = AnalyticsData(False, {}, None)
self._data = AnalyticsData(False, {})
self._store = Store[dict[str, Any]](hass, STORAGE_VERSION, STORAGE_KEY)
self._basic_scheduled: CALLBACK_TYPE | None = None
self._snapshot_scheduled: CALLBACK_TYPE | None = None
@property
def preferences(self) -> dict:
@@ -228,6 +251,7 @@ class Analytics:
preferences = self._data.preferences
return {
ATTR_BASE: preferences.get(ATTR_BASE, False),
ATTR_SNAPSHOTS: preferences.get(ATTR_SNAPSHOTS, False),
ATTR_DIAGNOSTICS: preferences.get(ATTR_DIAGNOSTICS, False),
ATTR_USAGE: preferences.get(ATTR_USAGE, False),
ATTR_STATISTICS: preferences.get(ATTR_STATISTICS, False),
@@ -244,9 +268,9 @@ class Analytics:
return self._data.uuid
@property
def endpoint(self) -> str:
def endpoint_basic(self) -> str:
"""Return the endpoint that will receive the payload."""
if HA_VERSION.endswith("0.dev0"):
if RELEASE_CHANNEL is ReleaseChannel.DEV:
# dev installations will contact the dev analytics environment
return ANALYTICS_ENDPOINT_URL_DEV
return ANALYTICS_ENDPOINT_URL
@@ -277,13 +301,17 @@ class Analytics:
):
self._data.preferences[ATTR_DIAGNOSTICS] = False
async def _save(self) -> None:
"""Save data."""
await self._store.async_save(dataclass_asdict(self._data))
async def save_preferences(self, preferences: dict) -> None:
"""Save preferences."""
preferences = PREFERENCE_SCHEMA(preferences)
self._data.preferences.update(preferences)
self._data.onboarded = True
await self._store.async_save(dataclass_asdict(self._data))
await self._save()
if self.supervisor:
await hassio.async_update_diagnostics(
@@ -292,17 +320,16 @@ class Analytics:
async def send_analytics(self, _: datetime | None = None) -> None:
"""Send analytics."""
if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
return
hass = self.hass
supervisor_info = None
operating_system_info: dict[str, Any] = {}
if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
LOGGER.debug("Nothing to submit")
return
if self._data.uuid is None:
self._data.uuid = gen_uuid()
await self._store.async_save(dataclass_asdict(self._data))
await self._save()
if self.supervisor:
supervisor_info = hassio.get_supervisor_info(hass)
@@ -436,7 +463,7 @@ class Analytics:
try:
async with timeout(30):
response = await self.session.post(self.endpoint, json=payload)
response = await self.session.post(self.endpoint_basic, json=payload)
if response.status == 200:
LOGGER.info(
(
@@ -449,7 +476,7 @@ class Analytics:
LOGGER.warning(
"Sending analytics failed with statuscode %s from %s",
response.status,
self.endpoint,
self.endpoint_basic,
)
except TimeoutError:
LOGGER.error("Timeout sending analytics to %s", ANALYTICS_ENDPOINT_URL)
@@ -489,6 +516,182 @@ class Analytics:
if entry.source != SOURCE_IGNORE and entry.disabled_by is None
)
async def send_snapshot(self, _: datetime | None = None) -> None:
"""Send a snapshot."""
if not self.onboarded or not self.preferences.get(ATTR_SNAPSHOTS, False):
return
payload = await _async_snapshot_payload(self.hass)
headers = {
"Content-Type": "application/json",
"User-Agent": f"home-assistant/{HA_VERSION}",
}
if self._data.submission_identifier is not None:
headers["X-Device-Database-Submission-Identifier"] = (
self._data.submission_identifier
)
try:
async with timeout(30):
response = await self.session.post(
ANALYTICS_SNAPSHOT_ENDPOINT_URL, json=payload, headers=headers
)
if response.status == 200: # OK
response_data = await response.json()
new_identifier = response_data.get("submission_identifier")
if (
new_identifier is not None
and new_identifier != self._data.submission_identifier
):
self._data.submission_identifier = new_identifier
await self._save()
LOGGER.info(
"Submitted snapshot analytics to Home Assistant servers"
)
elif response.status == 400: # Bad Request
response_data = await response.json()
error_kind = response_data.get("kind", "unknown")
error_message = response_data.get("message", "Unknown error")
if error_kind == "invalid-submission-identifier":
# Clear the invalid identifier and retry on next cycle
LOGGER.warning(
"Invalid submission identifier to %s, clearing: %s",
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
error_message,
)
self._data.submission_identifier = None
await self._save()
else:
LOGGER.warning(
"Malformed snapshot analytics submission (%s) to %s: %s",
error_kind,
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
error_message,
)
elif response.status == 503: # Service Unavailable
response_text = await response.text()
LOGGER.warning(
"Snapshot analytics service %s unavailable: %s",
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
response_text,
)
else:
LOGGER.warning(
"Unexpected status code %s when submitting snapshot analytics to %s",
response.status,
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
)
except TimeoutError:
LOGGER.error(
"Timeout sending snapshot analytics to %s",
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
)
except aiohttp.ClientError as err:
LOGGER.error(
"Error sending snapshot analytics to %s: %r",
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
err,
)
async def async_schedule(self) -> None:
"""Schedule analytics."""
if not self.onboarded:
LOGGER.debug("Analytics not scheduled")
if self._basic_scheduled is not None:
self._basic_scheduled()
self._basic_scheduled = None
if self._snapshot_scheduled:
self._snapshot_scheduled()
self._snapshot_scheduled = None
return
if not self.preferences.get(ATTR_BASE, False):
LOGGER.debug("Basic analytics not scheduled")
if self._basic_scheduled is not None:
self._basic_scheduled()
self._basic_scheduled = None
elif self._basic_scheduled is None:
# Wait 15 min after started for basic analytics
self._basic_scheduled = async_call_later(
self.hass,
900,
HassJob(
self._async_schedule_basic,
name="basic analytics schedule",
cancel_on_shutdown=True,
),
)
if not self.preferences.get(ATTR_SNAPSHOTS, False) or RELEASE_CHANNEL not in (
ReleaseChannel.DEV,
ReleaseChannel.NIGHTLY,
):
LOGGER.debug("Snapshot analytics not scheduled")
if self._snapshot_scheduled:
self._snapshot_scheduled()
self._snapshot_scheduled = None
elif self._snapshot_scheduled is None:
snapshot_submission_time = self._data.snapshot_submission_time
if snapshot_submission_time is None:
# Randomize the submission time within the 24 hours
snapshot_submission_time = random.uniform(0, 86400)
self._data.snapshot_submission_time = snapshot_submission_time
await self._save()
LOGGER.debug(
"Initialized snapshot submission time to %s",
snapshot_submission_time,
)
# Calculate delay until next submission
current_time = time.time()
delay = (snapshot_submission_time - current_time) % 86400
self._snapshot_scheduled = async_call_later(
self.hass,
delay,
HassJob(
self._async_schedule_snapshots,
name="snapshot analytics schedule",
cancel_on_shutdown=True,
),
)
async def _async_schedule_basic(self, _: datetime | None = None) -> None:
"""Schedule basic analytics."""
await self.send_analytics()
# Send basic analytics every day
self._basic_scheduled = async_track_time_interval(
self.hass,
self.send_analytics,
INTERVAL,
name="basic analytics daily",
cancel_on_shutdown=True,
)
async def _async_schedule_snapshots(self, _: datetime | None = None) -> None:
"""Schedule snapshot analytics."""
await self.send_snapshot()
# Send snapshot analytics every day
self._snapshot_scheduled = async_track_time_interval(
self.hass,
self.send_snapshot,
INTERVAL,
name="snapshot analytics daily",
cancel_on_shutdown=True,
)
def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
"""Extract domains from the YAML configuration."""
@@ -505,8 +708,8 @@ DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()
async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
"""Return detailed information about entities and devices."""
async def _async_snapshot_payload(hass: HomeAssistant) -> dict: # noqa: C901
"""Return detailed information about entities and devices for a snapshot."""
dev_reg = dr.async_get(hass)
ent_reg = er.async_get(hass)
@@ -711,8 +914,13 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
entities_info.append(entity_info)
return integrations_info
async def async_devices_payload(hass: HomeAssistant) -> dict:
"""Return detailed information about entities and devices for a direct download."""
return {
"version": "home-assistant:1",
"version": f"home-assistant:{SNAPSHOT_VERSION}",
"home_assistant": HA_VERSION,
"integrations": integrations_info,
"integrations": await _async_snapshot_payload(hass),
}

View File

@@ -7,6 +7,8 @@ import voluptuous as vol
ANALYTICS_ENDPOINT_URL = "https://analytics-api.home-assistant.io/v1"
ANALYTICS_ENDPOINT_URL_DEV = "https://analytics-api-dev.home-assistant.io/v1"
SNAPSHOT_VERSION = "1"
ANALYTICS_SNAPSHOT_ENDPOINT_URL = f"https://device-database.eco-dev-aws.openhomefoundation.com/api/v1/snapshot/{SNAPSHOT_VERSION}"
DOMAIN = "analytics"
INTERVAL = timedelta(days=1)
STORAGE_KEY = "core.analytics"
@@ -38,6 +40,7 @@ ATTR_PREFERENCES = "preferences"
ATTR_PROTECTED = "protected"
ATTR_RECORDER = "recorder"
ATTR_SLUG = "slug"
ATTR_SNAPSHOTS = "snapshots"
ATTR_STATE_COUNT = "state_count"
ATTR_STATISTICS = "statistics"
ATTR_SUPERVISOR = "supervisor"
@@ -51,6 +54,7 @@ ATTR_VERSION = "version"
PREFERENCE_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_BASE): bool,
vol.Optional(ATTR_SNAPSHOTS): bool,
vol.Optional(ATTR_DIAGNOSTICS): bool,
vol.Optional(ATTR_STATISTICS): bool,
vol.Optional(ATTR_USAGE): bool,

View File

@@ -7,3 +7,26 @@ CONNECTION_TIMEOUT: int = 10
# Field name of last self test retrieved from apcupsd.
LAST_S_TEST: Final = "laststest"
# Mapping of deprecated sensor keys (as reported by apcupsd, lower-cased) to their deprecation
# repair issue translation keys.
DEPRECATED_SENSORS: Final = {
"apc": "apc_deprecated",
"end apc": "date_deprecated",
"date": "date_deprecated",
"apcmodel": "available_via_device_info",
"model": "available_via_device_info",
"firmware": "available_via_device_info",
"version": "available_via_device_info",
"upsname": "available_via_device_info",
"serialno": "available_via_device_info",
}
AVAILABLE_VIA_DEVICE_ATTR: Final = {
"apcmodel": "model",
"model": "model",
"firmware": "hw_version",
"version": "sw_version",
"upsname": "name",
"serialno": "serial_number",
}
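DEPRECATED_SENSORS decides whether a status key gets a repair issue and which translation key to use, while AVAILABLE_VIA_DEVICE_ATTR supplies the device attribute named in the issue text. A small illustrative lookup against standalone copies of two entries (not the integration code itself):

# Standalone copies of two entries from the mappings above, for illustration.
DEPRECATED_SENSORS = {"serialno": "available_via_device_info", "date": "date_deprecated"}
AVAILABLE_VIA_DEVICE_ATTR = {"serialno": "serial_number"}

for key in ("serialno", "date", "loadpct"):
    reason = DEPRECATED_SENSORS.get(key)
    if reason is None:
        print(f"{key}: not deprecated, no repair issue")
    elif (attr := AVAILABLE_VIA_DEVICE_ATTR.get(key)) is not None:
        print(f"{key}: deprecated, value available via device attribute {attr!r}")
    else:
        print(f"{key}: deprecated ({reason})")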

View File

@@ -4,6 +4,8 @@ from __future__ import annotations
import logging
from homeassistant.components.automation import automations_with_entity
from homeassistant.components.script import scripts_with_entity
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
@@ -22,9 +24,11 @@ from homeassistant.const import (
UnitOfTime,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
import homeassistant.helpers.issue_registry as ir
from .const import LAST_S_TEST
from .const import AVAILABLE_VIA_DEVICE_ATTR, DEPRECATED_SENSORS, DOMAIN, LAST_S_TEST
from .coordinator import APCUPSdConfigEntry, APCUPSdCoordinator
from .entity import APCUPSdEntity
@@ -528,3 +532,62 @@ class APCUPSdSensor(APCUPSdEntity, SensorEntity):
self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key])
if not self.native_unit_of_measurement:
self._attr_native_unit_of_measurement = inferred_unit
async def async_added_to_hass(self) -> None:
"""Handle when entity is added to Home Assistant.
If this is a deprecated sensor entity, create a repair issue to guide
the user to disable it.
"""
await super().async_added_to_hass()
if not self.enabled:
return
reason = DEPRECATED_SENSORS.get(self.entity_description.key)
if not reason:
return
automations = automations_with_entity(self.hass, self.entity_id)
scripts = scripts_with_entity(self.hass, self.entity_id)
if not automations and not scripts:
return
entity_registry = er.async_get(self.hass)
items = [
f"- [{entry.name or entry.original_name or entity_id}]"
f"(/config/{integration}/edit/{entry.unique_id or entity_id.split('.', 1)[-1]})"
for integration, entities in (
("automation", automations),
("script", scripts),
)
for entity_id in entities
if (entry := entity_registry.async_get(entity_id))
]
placeholders = {
"entity_name": str(self.name or self.entity_id),
"entity_id": self.entity_id,
"items": "\n".join(items),
}
if via_attr := AVAILABLE_VIA_DEVICE_ATTR.get(self.entity_description.key):
placeholders["available_via_device_attr"] = via_attr
if device_entry := self.device_entry:
placeholders["device_id"] = device_entry.id
ir.async_create_issue(
self.hass,
DOMAIN,
f"{reason}_{self.entity_id}",
breaks_in_ha_version="2026.6.0",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key=reason,
translation_placeholders=placeholders,
)
async def async_will_remove_from_hass(self) -> None:
"""Handle when entity will be removed from Home Assistant."""
await super().async_will_remove_from_hass()
if issue_key := DEPRECATED_SENSORS.get(self.entity_description.key):
ir.async_delete_issue(self.hass, DOMAIN, f"{issue_key}_{self.entity_id}")
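The repair issue id is the translation key plus the entity id, so creation in async_added_to_hass and deletion in async_will_remove_from_hass always refer to the same issue. A tiny illustration of that id scheme, using values matching the test snapshots below:

reason = "available_via_device_info"
entity_id = "sensor.myups_model"
issue_id = f"{reason}_{entity_id}"
assert issue_id == "available_via_device_info_sensor.myups_model"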

View File

@@ -241,5 +241,19 @@
"cannot_connect": {
"message": "Cannot connect to APC UPS Daemon."
}
},
"issues": {
"apc_deprecated": {
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because it exposes internal details of the APC UPS Daemon response.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to use supported APC UPS entities instead. Reload the APC UPS Daemon integration afterwards to resolve this issue.",
"title": "{entity_name} sensor is deprecated"
},
"available_via_device_info": {
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because the same value is available from the device registry via `device_attr(\"{device_id}\", \"{available_via_device_attr}\")`.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to use the `device_attr` helper instead of this sensor. Reload the APC UPS Daemon integration afterwards to resolve this issue.",
"title": "{entity_name} sensor is deprecated"
},
"date_deprecated": {
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because the timestamp is already available from other APC UPS sensors via their last updated time.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to reference any entity's `last_updated` attribute instead (for example, `states.binary_sensor.apcups_online_status.last_updated`). Reload the APC UPS Daemon integration afterwards to resolve this issue.",
"title": "{entity_name} sensor is deprecated"
}
}
}
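The available_via_device_info text points users at the device_attr template helper instead of the deprecated sensor. A minimal sketch of rendering such a template from Python, assuming a valid device id (the id and attribute here are placeholders, not part of the change):

from homeassistant.core import HomeAssistant
from homeassistant.helpers.template import Template


def render_device_model(hass: HomeAssistant, device_id: str) -> str:
    """Illustrative only: resolve the same value the deprecated sensor exposed."""
    # device_id is a placeholder; the repair issue text fills in the real one.
    tpl = Template("{{ device_attr('" + device_id + "', 'model') }}", hass)
    return tpl.async_render()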

View File

@@ -0,0 +1,393 @@
# serializer version: 1
# name: test_sensors[sensor.adguard_home_average_processing_speed-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.adguard_home_average_processing_speed',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Average processing speed',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'average_processing_speed',
'unique_id': 'adguard_127.0.0.1_3000_sensor_average_speed',
'unit_of_measurement': <UnitOfTime.MILLISECONDS: 'ms'>,
})
# ---
# name: test_sensors[sensor.adguard_home_average_processing_speed-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home Average processing speed',
'unit_of_measurement': <UnitOfTime.MILLISECONDS: 'ms'>,
}),
'context': <ANY>,
'entity_id': 'sensor.adguard_home_average_processing_speed',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '31.41',
})
# ---
# name: test_sensors[sensor.adguard_home_dns_queries-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.adguard_home_dns_queries',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'DNS queries',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'dns_queries',
'unique_id': 'adguard_127.0.0.1_3000_sensor_dns_queries',
'unit_of_measurement': 'queries',
})
# ---
# name: test_sensors[sensor.adguard_home_dns_queries-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home DNS queries',
'unit_of_measurement': 'queries',
}),
'context': <ANY>,
'entity_id': 'sensor.adguard_home_dns_queries',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '666',
})
# ---
# name: test_sensors[sensor.adguard_home_dns_queries_blocked-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.adguard_home_dns_queries_blocked',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'DNS queries blocked',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'dns_queries_blocked',
'unique_id': 'adguard_127.0.0.1_3000_sensor_blocked_filtering',
'unit_of_measurement': 'queries',
})
# ---
# name: test_sensors[sensor.adguard_home_dns_queries_blocked-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home DNS queries blocked',
'unit_of_measurement': 'queries',
}),
'context': <ANY>,
'entity_id': 'sensor.adguard_home_dns_queries_blocked',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '1337',
})
# ---
# name: test_sensors[sensor.adguard_home_dns_queries_blocked_ratio-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.adguard_home_dns_queries_blocked_ratio',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'DNS queries blocked ratio',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'dns_queries_blocked_ratio',
'unique_id': 'adguard_127.0.0.1_3000_sensor_blocked_percentage',
'unit_of_measurement': '%',
})
# ---
# name: test_sensors[sensor.adguard_home_dns_queries_blocked_ratio-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home DNS queries blocked ratio',
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.adguard_home_dns_queries_blocked_ratio',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '200.75',
})
# ---
# name: test_sensors[sensor.adguard_home_parental_control_blocked-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.adguard_home_parental_control_blocked',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Parental control blocked',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'parental_control_blocked',
'unique_id': 'adguard_127.0.0.1_3000_sensor_blocked_parental',
'unit_of_measurement': 'requests',
})
# ---
# name: test_sensors[sensor.adguard_home_parental_control_blocked-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home Parental control blocked',
'unit_of_measurement': 'requests',
}),
'context': <ANY>,
'entity_id': 'sensor.adguard_home_parental_control_blocked',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '13',
})
# ---
# name: test_sensors[sensor.adguard_home_rules_count-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.adguard_home_rules_count',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Rules count',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'rules_count',
'unique_id': 'adguard_127.0.0.1_3000_sensor_rules_count',
'unit_of_measurement': 'rules',
})
# ---
# name: test_sensors[sensor.adguard_home_rules_count-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home Rules count',
'unit_of_measurement': 'rules',
}),
'context': <ANY>,
'entity_id': 'sensor.adguard_home_rules_count',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '100',
})
# ---
# name: test_sensors[sensor.adguard_home_safe_browsing_blocked-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.adguard_home_safe_browsing_blocked',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Safe browsing blocked',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'safe_browsing_blocked',
'unique_id': 'adguard_127.0.0.1_3000_sensor_blocked_safebrowsing',
'unit_of_measurement': 'requests',
})
# ---
# name: test_sensors[sensor.adguard_home_safe_browsing_blocked-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home Safe browsing blocked',
'unit_of_measurement': 'requests',
}),
'context': <ANY>,
'entity_id': 'sensor.adguard_home_safe_browsing_blocked',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '42',
})
# ---
# name: test_sensors[sensor.adguard_home_safe_searches_enforced-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.adguard_home_safe_searches_enforced',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Safe searches enforced',
'platform': 'adguard',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'safe_searches_enforced',
'unique_id': 'adguard_127.0.0.1_3000_sensor_enforced_safesearch',
'unit_of_measurement': 'requests',
})
# ---
# name: test_sensors[sensor.adguard_home_safe_searches_enforced-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'AdGuard Home Safe searches enforced',
'unit_of_measurement': 'requests',
}),
'context': <ANY>,
'entity_id': 'sensor.adguard_home_safe_searches_enforced',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '18',
})
# ---

View File

@@ -0,0 +1,41 @@
"""Tests for the AdGuard Home."""
from unittest.mock import patch
from adguardhome import AdGuardHomeConnectionError
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from . import setup_integration
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_setup(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the adguard setup."""
with patch("homeassistant.components.adguard.PLATFORMS", []):
await setup_integration(hass, mock_config_entry, aioclient_mock)
assert mock_config_entry.state is ConfigEntryState.LOADED
async def test_setup_failed(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the adguard setup failed."""
mock_config_entry.add_to_hass(hass)
aioclient_mock.get(
"https://127.0.0.1:3000/control/status",
exc=AdGuardHomeConnectionError("Connection error"),
)
assert not await hass.config_entries.async_setup(mock_config_entry.entry_id)
assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY

View File

@@ -0,0 +1,48 @@
"""Tests for the AdGuard Home sensor entities."""
from unittest.mock import patch
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.const import CONTENT_TYPE_JSON, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import setup_integration
from tests.common import MockConfigEntry, snapshot_platform
from tests.test_util.aiohttp import AiohttpClientMocker
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_sensors(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
aioclient_mock: AiohttpClientMocker,
snapshot: SnapshotAssertion,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the adguard sensor platform."""
aioclient_mock.get(
"https://127.0.0.1:3000/control/stats",
json={
"num_dns_queries": 666,
"num_blocked_filtering": 1337,
"num_replaced_safebrowsing": 42,
"num_replaced_parental": 13,
"num_replaced_safesearch": 18,
"avg_processing_time": 0.03141,
},
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
"https://127.0.0.1:3000/control/filtering/status",
json={"filters": [{"rules_count": 99}, {"rules_count": 1}]},
headers={"Content-Type": CONTENT_TYPE_JSON},
)
with patch("homeassistant.components.adguard.PLATFORMS", [Platform.SENSOR]):
await setup_integration(hass, mock_config_entry, aioclient_mock)
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
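The 'sensor.adguard_home_average_processing_speed' snapshot earlier shows a state of '31.41' ms while this mock returns avg_processing_time of 0.03141; that is consistent with a seconds-to-milliseconds conversion somewhere in the adguard stack (an assumption, since the conversion itself is not part of this diff):

# Assumed conversion from the mocked response to the snapshot state.
avg_processing_time_s = 0.03141
assert round(avg_processing_time_s * 1000, 2) == 31.41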

View File

@@ -0,0 +1,184 @@
"""Tests for the AdGuard Home sensor entities."""
from unittest.mock import patch
import pytest
from homeassistant.components.adguard.const import (
DOMAIN,
SERVICE_ADD_URL,
SERVICE_DISABLE_URL,
SERVICE_ENABLE_URL,
SERVICE_REFRESH,
SERVICE_REMOVE_URL,
)
from homeassistant.const import CONTENT_TYPE_JSON
from homeassistant.core import HomeAssistant
from . import setup_integration
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_service_registration(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the adguard services be registered."""
with patch("homeassistant.components.adguard.PLATFORMS", []):
await setup_integration(hass, mock_config_entry, aioclient_mock)
services = hass.services.async_services_for_domain(DOMAIN)
assert len(services) == 5
assert SERVICE_ADD_URL in services
assert SERVICE_DISABLE_URL in services
assert SERVICE_ENABLE_URL in services
assert SERVICE_REFRESH in services
assert SERVICE_REMOVE_URL in services
async def test_service_unregistration(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the adguard services be unregistered with unloading last entry."""
with patch("homeassistant.components.adguard.PLATFORMS", []):
await setup_integration(hass, mock_config_entry, aioclient_mock)
services = hass.services.async_services_for_domain(DOMAIN)
assert len(services) == 5
await hass.config_entries.async_unload(mock_config_entry.entry_id)
await hass.async_block_till_done()
services = hass.services.async_services_for_domain(DOMAIN)
assert len(services) == 0
@pytest.mark.parametrize(
("service", "mocked_requests", "service_call_data"),
[
(
SERVICE_ADD_URL,
[
{
"method": "post",
"url": "https://127.0.0.1:3000/control/filtering/add_url",
"json": {
"whitelist": False,
"name": "Example",
"url": "https://example.com/1.txt",
},
}
],
{"name": "Example", "url": "https://example.com/1.txt"},
),
(
SERVICE_DISABLE_URL,
[
{
"method": "get",
"url": "https://127.0.0.1:3000/control/filtering/status",
"json": {
"filters": [
{
"name": "Example",
"url": "https://example.com/1.txt",
}
],
},
},
{
"method": "post",
"url": "https://127.0.0.1:3000/control/filtering/set_url",
"json": {
"whitelist": False,
"url": "https://example.com/1.txt",
},
},
],
{"url": "https://example.com/1.txt"},
),
(
SERVICE_ENABLE_URL,
[
{
"method": "get",
"url": "https://127.0.0.1:3000/control/filtering/status",
"json": {
"filters": [
{
"name": "Example",
"url": "https://example.com/1.txt",
}
],
},
},
{
"method": "post",
"url": "https://127.0.0.1:3000/control/filtering/set_url",
"json": {
"whitelist": False,
"url": "https://example.com/1.txt",
},
},
],
{"url": "https://example.com/1.txt"},
),
(
SERVICE_REFRESH,
[
{
"method": "post",
"url": "https://127.0.0.1:3000/control/filtering/refresh?force=false",
"json": {"whitelist": False},
}
],
{"force": False},
),
(
SERVICE_REMOVE_URL,
[
{
"method": "post",
"url": "https://127.0.0.1:3000/control/filtering/remove_url",
"json": {
"whitelist": False,
"url": "https://example.com/1.txt",
},
}
],
{"url": "https://example.com/1.txt"},
),
],
)
async def test_service(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
mock_config_entry: MockConfigEntry,
service: str,
mocked_requests: list[dict],
service_call_data: dict,
) -> None:
"""Test the adguard services be unregistered with unloading last entry."""
for mocked_request in mocked_requests:
aioclient_mock.request(
mocked_request["method"],
mocked_request["url"],
json=mocked_request["json"],
headers={"Content-Type": CONTENT_TYPE_JSON},
)
with patch("homeassistant.components.adguard.PLATFORMS", []):
await setup_integration(hass, mock_config_entry, aioclient_mock)
await hass.services.async_call(
DOMAIN,
service,
service_call_data,
blocking=True,
)

View File

@@ -17,14 +17,8 @@ from tests.common import MockConfigEntry, snapshot_platform
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_update(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
aioclient_mock: AiohttpClientMocker,
snapshot: SnapshotAssertion,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the adguard update platform."""
def aioclient_mock_update_available(aioclient_mock: AiohttpClientMocker) -> None:
"""Mock AdGuard Home update available response."""
aioclient_mock.post(
"https://127.0.0.1:3000/control/version.json",
json={
@@ -37,6 +31,17 @@ async def test_update(
headers={"Content-Type": CONTENT_TYPE_JSON},
)
async def test_update(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
aioclient_mock: AiohttpClientMocker,
snapshot: SnapshotAssertion,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the adguard update platform."""
aioclient_mock_update_available(aioclient_mock)
with patch("homeassistant.components.adguard.PLATFORMS", [Platform.UPDATE]):
await setup_integration(hass, mock_config_entry, aioclient_mock)
@@ -67,17 +72,7 @@ async def test_update_install(
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the adguard update installation."""
aioclient_mock.post(
"https://127.0.0.1:3000/control/version.json",
json={
"new_version": "v0.107.59",
"announcement": "AdGuard Home v0.107.59 is now available!",
"announcement_url": "https://github.com/AdguardTeam/AdGuardHome/releases/tag/v0.107.59",
"can_autoupdate": True,
"disabled": False,
},
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock_update_available(aioclient_mock)
aioclient_mock.post("https://127.0.0.1:3000/control/update")
with patch("homeassistant.components.adguard.PLATFORMS", [Platform.UPDATE]):
@@ -104,17 +99,7 @@ async def test_update_install_failed(
mock_config_entry: MockConfigEntry,
) -> None:
"""Test the adguard update install failed."""
aioclient_mock.post(
"https://127.0.0.1:3000/control/version.json",
json={
"new_version": "v0.107.59",
"announcement": "AdGuard Home v0.107.59 is now available!",
"announcement_url": "https://github.com/AdguardTeam/AdGuardHome/releases/tag/v0.107.59",
"can_autoupdate": True,
"disabled": False,
},
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock_update_available(aioclient_mock)
aioclient_mock.post(
"https://127.0.0.1:3000/control/update", exc=AdGuardHomeError("boom")
)

View File

@@ -1,6 +1,7 @@
"""The tests for the analytics ."""
from collections.abc import Generator
from datetime import timedelta
from http import HTTPStatus
from typing import Any
from unittest.mock import AsyncMock, Mock, patch
@@ -22,8 +23,10 @@ from homeassistant.components.analytics.analytics import (
from homeassistant.components.analytics.const import (
ANALYTICS_ENDPOINT_URL,
ANALYTICS_ENDPOINT_URL_DEV,
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
ATTR_BASE,
ATTR_DIAGNOSTICS,
ATTR_SNAPSHOTS,
ATTR_STATISTICS,
ATTR_USAGE,
)
@@ -31,13 +34,20 @@ from homeassistant.components.number import NumberDeviceClass
from homeassistant.components.sensor import SensorDeviceClass
from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState
from homeassistant.const import ATTR_ASSUMED_STATE, EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, ReleaseChannel
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.loader import IntegrationNotFound
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
from tests.common import MockConfigEntry, MockModule, mock_integration, mock_platform
from tests.common import (
MockConfigEntry,
MockModule,
async_fire_time_changed,
mock_integration,
mock_platform,
)
from tests.test_util.aiohttp import AiohttpClientMocker
from tests.typing import ClientSessionGenerator
@@ -59,9 +69,31 @@ def uuid_mock() -> Generator[None]:
@pytest.fixture(autouse=True)
def ha_version_mock() -> Generator[None]:
"""Mock the core version."""
with patch(
"homeassistant.components.analytics.analytics.HA_VERSION",
MOCK_VERSION,
with (
patch(
"homeassistant.components.analytics.analytics.HA_VERSION",
MOCK_VERSION,
),
patch(
"homeassistant.components.analytics.analytics.RELEASE_CHANNEL",
ReleaseChannel.STABLE,
),
):
yield
@pytest.fixture
def ha_dev_version_mock() -> Generator[None]:
"""Mock the core version as a dev version."""
with (
patch(
"homeassistant.components.analytics.analytics.HA_VERSION",
MOCK_VERSION_DEV,
),
patch(
"homeassistant.components.analytics.analytics.RELEASE_CHANNEL",
ReleaseChannel.DEV,
),
):
yield
@@ -97,7 +129,6 @@ async def test_no_send(
await analytics.send_analytics()
assert "Nothing to submit" in caplog.text
assert len(aioclient_mock.mock_calls) == 0
@@ -615,7 +646,7 @@ async def test_custom_integrations(
assert snapshot == submitted_data
@pytest.mark.usefixtures("supervisor_client")
@pytest.mark.usefixtures("ha_dev_version_mock", "supervisor_client")
async def test_dev_url(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
@@ -625,16 +656,13 @@ async def test_dev_url(
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True})
with patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION_DEV
):
await analytics.send_analytics()
await analytics.send_analytics()
payload = aioclient_mock.mock_calls[0]
assert str(payload[1]) == ANALYTICS_ENDPOINT_URL_DEV
@pytest.mark.usefixtures("supervisor_client")
@pytest.mark.usefixtures("ha_dev_version_mock", "supervisor_client")
async def test_dev_url_error(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
@@ -645,10 +673,7 @@ async def test_dev_url_error(
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True})
with patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION_DEV
):
await analytics.send_analytics()
await analytics.send_analytics()
payload = aioclient_mock.mock_calls[0]
assert str(payload[1]) == ANALYTICS_ENDPOINT_URL_DEV
@@ -860,7 +885,7 @@ async def test_send_with_problems_loading_yaml(
assert len(aioclient_mock.mock_calls) == 0
@pytest.mark.usefixtures("mock_hass_config", "supervisor_client")
@pytest.mark.usefixtures("ha_dev_version_mock", "mock_hass_config", "supervisor_client")
async def test_timeout_while_sending(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
@@ -871,10 +896,7 @@ async def test_timeout_while_sending(
aioclient_mock.post(ANALYTICS_ENDPOINT_URL_DEV, exc=TimeoutError())
await analytics.save_preferences({ATTR_BASE: True})
with patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION_DEV
):
await analytics.send_analytics()
await analytics.send_analytics()
assert "Timeout sending analytics" in caplog.text
@@ -1426,3 +1448,346 @@ async def test_analytics_platforms(
},
},
}
async def test_send_snapshot_disabled(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test no snapshots are sent."""
analytics = Analytics(hass)
await analytics.send_snapshot()
await analytics.save_preferences({ATTR_SNAPSHOTS: False})
await analytics.send_snapshot()
assert len(aioclient_mock.mock_calls) == 0
async def test_send_snapshot_success(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test successful snapshot submission."""
aioclient_mock.post(
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
status=200,
json={"submission_identifier": "test-identifier-123"},
)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_SNAPSHOTS: True})
await analytics.send_snapshot()
assert len(aioclient_mock.mock_calls) == 1
preferences = await analytics._store.async_load()
assert preferences["submission_identifier"] == "test-identifier-123"
assert "Submitted snapshot analytics to Home Assistant servers" in caplog.text
async def test_send_snapshot_with_existing_identifier(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test snapshot submission with existing identifier."""
aioclient_mock.post(
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
status=200,
json={"submission_identifier": "test-identifier-123"},
)
analytics = Analytics(hass)
with patch(
"homeassistant.helpers.storage.Store.async_load",
return_value={
"onboarded": True,
"preferences": {ATTR_BASE: True, ATTR_SNAPSHOTS: True},
"uuid": "12345",
"submission_identifier": "old-identifier",
},
):
await analytics.load()
await analytics.send_snapshot()
assert len(aioclient_mock.mock_calls) == 1
call_headers = aioclient_mock.mock_calls[0][3]
assert call_headers["X-Device-Database-Submission-Identifier"] == "old-identifier"
preferences = await analytics._store.async_load()
assert preferences["submission_identifier"] == "test-identifier-123"
assert "Submitted snapshot analytics to Home Assistant servers" in caplog.text
async def test_send_snapshot_invalid_identifier(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test snapshot submission with invalid identifier."""
aioclient_mock.post(
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
status=400,
json={
"kind": "invalid-submission-identifier",
"message": "The identifier is invalid",
},
)
analytics = Analytics(hass)
with patch(
"homeassistant.helpers.storage.Store.async_load",
return_value={
"onboarded": True,
"preferences": {ATTR_BASE: True, ATTR_SNAPSHOTS: True},
"uuid": "12345",
"submission_identifier": "invalid-identifier",
},
):
await analytics.load()
await analytics.send_snapshot()
assert len(aioclient_mock.mock_calls) == 1
preferences = await analytics._store.async_load()
assert preferences.get("submission_identifier") is None
assert "Invalid submission identifier" in caplog.text
@pytest.mark.parametrize(
("post_kwargs", "expected_log"),
[
(
{
"status": 400,
"json": {
"kind": "malformed-payload",
"message": "Invalid payload format",
},
},
"Malformed snapshot analytics submission",
),
(
{"status": 503, "text": "Service Unavailable"},
f"Snapshot analytics service {ANALYTICS_SNAPSHOT_ENDPOINT_URL} unavailable",
),
(
{"status": 500},
"Unexpected status code 500 when submitting snapshot analytics",
),
(
{"exc": TimeoutError()},
"Timeout sending snapshot analytics",
),
(
{"exc": aiohttp.ClientError()},
"Error sending snapshot analytics",
),
],
ids=[
"bad_request",
"service_unavailable",
"unexpected_status",
"timeout",
"client_error",
],
)
async def test_send_snapshot_error(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
aioclient_mock: AiohttpClientMocker,
post_kwargs: dict[str, Any],
expected_log: str,
) -> None:
"""Test snapshot submission error."""
aioclient_mock.post(ANALYTICS_SNAPSHOT_ENDPOINT_URL, **post_kwargs)
analytics = Analytics(hass)
with patch(
"homeassistant.helpers.storage.Store.async_load",
return_value={
"onboarded": True,
"preferences": {ATTR_BASE: True, ATTR_SNAPSHOTS: True},
"uuid": "12345",
},
):
await analytics.load()
await analytics.send_snapshot()
assert expected_log in caplog.text
@pytest.mark.usefixtures("ha_dev_version_mock", "supervisor_client")
async def test_async_schedule(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test scheduling."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL_DEV, status=200)
aioclient_mock.post(ANALYTICS_SNAPSHOT_ENDPOINT_URL, status=200, json={})
analytics = Analytics(hass)
# Schedule when not onboarded
await analytics.async_schedule()
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=25))
await hass.async_block_till_done()
assert len(aioclient_mock.mock_calls) == 0
# Onboard and enable both
await analytics.save_preferences({ATTR_BASE: True, ATTR_SNAPSHOTS: True})
await analytics.async_schedule()
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=25))
await hass.async_block_till_done()
assert any(
str(call[1]) == ANALYTICS_ENDPOINT_URL_DEV for call in aioclient_mock.mock_calls
)
assert any(
str(call[1]) == ANALYTICS_SNAPSHOT_ENDPOINT_URL
for call in aioclient_mock.mock_calls
)
preferences = await analytics._store.async_load()
assert preferences["snapshot_submission_time"] is not None
assert 0 <= preferences["snapshot_submission_time"] <= 86400
@pytest.mark.usefixtures("ha_dev_version_mock")
async def test_async_schedule_disabled(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test scheduling when disabled."""
analytics = Analytics(hass)
with patch(
"homeassistant.helpers.storage.Store.async_load",
return_value={
"onboarded": True,
"preferences": {ATTR_BASE: False, ATTR_SNAPSHOTS: False},
"uuid": "12345",
},
):
await analytics.load()
await analytics.async_schedule()
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=25))
await hass.async_block_till_done()
assert len(aioclient_mock.mock_calls) == 0
@pytest.mark.usefixtures("supervisor_client")
async def test_async_schedule_snapshots_not_dev(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test that snapshots are not scheduled on non-dev versions."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
with patch(
"homeassistant.helpers.storage.Store.async_load",
return_value={
"onboarded": True,
"preferences": {ATTR_BASE: True, ATTR_SNAPSHOTS: True},
"uuid": "12345",
},
):
await analytics.load()
await analytics.async_schedule()
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=25))
await hass.async_block_till_done()
assert len(aioclient_mock.mock_calls) == 1
assert str(aioclient_mock.mock_calls[0][1]) == ANALYTICS_ENDPOINT_URL
@pytest.mark.usefixtures("ha_dev_version_mock", "supervisor_client")
async def test_async_schedule_already_scheduled(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test not rescheduled if already scheduled."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL_DEV, status=200)
aioclient_mock.post(ANALYTICS_SNAPSHOT_ENDPOINT_URL, status=200, json={})
analytics = Analytics(hass)
with patch(
"homeassistant.helpers.storage.Store.async_load",
return_value={
"onboarded": True,
"preferences": {ATTR_BASE: True, ATTR_SNAPSHOTS: True},
"uuid": "12345",
},
):
await analytics.load()
await analytics.async_schedule()
await analytics.async_schedule()
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=25))
await hass.async_block_till_done()
assert len(aioclient_mock.mock_calls) == 2
assert any(
str(call[1]) == ANALYTICS_ENDPOINT_URL_DEV for call in aioclient_mock.mock_calls
)
assert any(
str(call[1]) == ANALYTICS_SNAPSHOT_ENDPOINT_URL
for call in aioclient_mock.mock_calls
)
@pytest.mark.parametrize(("onboarded"), [True, False])
@pytest.mark.usefixtures("ha_dev_version_mock")
async def test_async_schedule_cancel_when_disabled(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
onboarded: bool,
) -> None:
"""Test that scheduled tasks are cancelled when disabled."""
analytics = Analytics(hass)
with patch(
"homeassistant.helpers.storage.Store.async_load",
return_value={
"onboarded": True,
"preferences": {ATTR_BASE: True, ATTR_SNAPSHOTS: True},
"uuid": "12345",
},
):
await analytics.load()
await analytics.async_schedule()
with patch(
"homeassistant.helpers.storage.Store.async_load",
return_value={
"onboarded": onboarded,
"preferences": {ATTR_BASE: False, ATTR_SNAPSHOTS: False},
"uuid": "12345",
},
):
await analytics.load()
await analytics.async_schedule()
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=25))
await hass.async_block_till_done()
assert len(aioclient_mock.mock_calls) == 0

View File

@@ -45,7 +45,6 @@ async def test_websocket(
{"type": "analytics/preferences", "preferences": {"base": True}}
)
response = await ws_client.receive_json()
assert len(aioclient_mock.mock_calls) == 1
assert response["result"]["preferences"]["base"]
await ws_client.send_json_auto_id({"type": "analytics"})

View File

@@ -1,4 +1,244 @@
# serializer version: 1
# name: test_deprecated_sensor_issue[apc-apc_deprecated]
IssueRegistryItemSnapshot({
'active': True,
'breaks_in_ha_version': '2026.6.0',
'created': <ANY>,
'data': None,
'dismissed_version': None,
'domain': 'apcupsd',
'is_fixable': False,
'is_persistent': False,
'issue_domain': None,
'issue_id': 'apc_deprecated_sensor.myups_status_data',
'learn_more_url': None,
'severity': <IssueSeverity.WARNING: 'warning'>,
'translation_key': 'apc_deprecated',
'translation_placeholders': dict({
'device_id': '<ANY>',
'entity_id': 'sensor.myups_status_data',
'entity_name': 'Status data',
'items': '''
- [APC UPS automation (apc)](/config/automation/edit/apcupsd_auto_apc)
- [APC UPS script (apc)](/config/script/edit/apcupsd_script_apc)
''',
}),
})
# ---
# name: test_deprecated_sensor_issue[apcmodel-available_via_device_info]
IssueRegistryItemSnapshot({
'active': True,
'breaks_in_ha_version': '2026.6.0',
'created': <ANY>,
'data': None,
'dismissed_version': None,
'domain': 'apcupsd',
'is_fixable': False,
'is_persistent': False,
'issue_domain': None,
'issue_id': 'available_via_device_info_sensor.myups_model',
'learn_more_url': None,
'severity': <IssueSeverity.WARNING: 'warning'>,
'translation_key': 'available_via_device_info',
'translation_placeholders': dict({
'available_via_device_attr': 'model',
'device_id': '<ANY>',
'entity_id': 'sensor.myups_model',
'entity_name': 'Model',
'items': '''
- [APC UPS automation (apcmodel)](/config/automation/edit/apcupsd_auto_apcmodel)
- [APC UPS script (apcmodel)](/config/script/edit/apcupsd_script_apcmodel)
''',
}),
})
# ---
# name: test_deprecated_sensor_issue[date-date_deprecated]
IssueRegistryItemSnapshot({
'active': True,
'breaks_in_ha_version': '2026.6.0',
'created': <ANY>,
'data': None,
'dismissed_version': None,
'domain': 'apcupsd',
'is_fixable': False,
'is_persistent': False,
'issue_domain': None,
'issue_id': 'date_deprecated_sensor.myups_status_date',
'learn_more_url': None,
'severity': <IssueSeverity.WARNING: 'warning'>,
'translation_key': 'date_deprecated',
'translation_placeholders': dict({
'device_id': '<ANY>',
'entity_id': 'sensor.myups_status_date',
'entity_name': 'Status date',
'items': '''
- [APC UPS automation (date)](/config/automation/edit/apcupsd_auto_date)
- [APC UPS script (date)](/config/script/edit/apcupsd_script_date)
''',
}),
})
# ---
# name: test_deprecated_sensor_issue[end apc-date_deprecated]
IssueRegistryItemSnapshot({
'active': True,
'breaks_in_ha_version': '2026.6.0',
'created': <ANY>,
'data': None,
'dismissed_version': None,
'domain': 'apcupsd',
'is_fixable': False,
'is_persistent': False,
'issue_domain': None,
'issue_id': 'date_deprecated_sensor.myups_date_and_time',
'learn_more_url': None,
'severity': <IssueSeverity.WARNING: 'warning'>,
'translation_key': 'date_deprecated',
'translation_placeholders': dict({
'device_id': '<ANY>',
'entity_id': 'sensor.myups_date_and_time',
'entity_name': 'Date and time',
'items': '''
- [APC UPS automation (end apc)](/config/automation/edit/apcupsd_auto_end_apc)
- [APC UPS script (end apc)](/config/script/edit/apcupsd_script_end_apc)
''',
}),
})
# ---
# name: test_deprecated_sensor_issue[firmware-available_via_device_info]
IssueRegistryItemSnapshot({
'active': True,
'breaks_in_ha_version': '2026.6.0',
'created': <ANY>,
'data': None,
'dismissed_version': None,
'domain': 'apcupsd',
'is_fixable': False,
'is_persistent': False,
'issue_domain': None,
'issue_id': 'available_via_device_info_sensor.myups_firmware_version',
'learn_more_url': None,
'severity': <IssueSeverity.WARNING: 'warning'>,
'translation_key': 'available_via_device_info',
'translation_placeholders': dict({
'available_via_device_attr': 'hw_version',
'device_id': '<ANY>',
'entity_id': 'sensor.myups_firmware_version',
'entity_name': 'Firmware version',
'items': '''
- [APC UPS automation (firmware)](/config/automation/edit/apcupsd_auto_firmware)
- [APC UPS script (firmware)](/config/script/edit/apcupsd_script_firmware)
''',
}),
})
# ---
# name: test_deprecated_sensor_issue[model-available_via_device_info]
IssueRegistryItemSnapshot({
'active': True,
'breaks_in_ha_version': '2026.6.0',
'created': <ANY>,
'data': None,
'dismissed_version': None,
'domain': 'apcupsd',
'is_fixable': False,
'is_persistent': False,
'issue_domain': None,
'issue_id': 'available_via_device_info_sensor.myups_model_2',
'learn_more_url': None,
'severity': <IssueSeverity.WARNING: 'warning'>,
'translation_key': 'available_via_device_info',
'translation_placeholders': dict({
'available_via_device_attr': 'model',
'device_id': '<ANY>',
'entity_id': 'sensor.myups_model_2',
'entity_name': 'Model',
'items': '''
- [APC UPS automation (model)](/config/automation/edit/apcupsd_auto_model)
- [APC UPS script (model)](/config/script/edit/apcupsd_script_model)
''',
}),
})
# ---
# name: test_deprecated_sensor_issue[serialno-available_via_device_info]
IssueRegistryItemSnapshot({
'active': True,
'breaks_in_ha_version': '2026.6.0',
'created': <ANY>,
'data': None,
'dismissed_version': None,
'domain': 'apcupsd',
'is_fixable': False,
'is_persistent': False,
'issue_domain': None,
'issue_id': 'available_via_device_info_sensor.myups_serial_number',
'learn_more_url': None,
'severity': <IssueSeverity.WARNING: 'warning'>,
'translation_key': 'available_via_device_info',
'translation_placeholders': dict({
'available_via_device_attr': 'serial_number',
'device_id': '<ANY>',
'entity_id': 'sensor.myups_serial_number',
'entity_name': 'Serial number',
'items': '''
- [APC UPS automation (serialno)](/config/automation/edit/apcupsd_auto_serialno)
- [APC UPS script (serialno)](/config/script/edit/apcupsd_script_serialno)
''',
}),
})
# ---
# name: test_deprecated_sensor_issue[upsname-available_via_device_info]
IssueRegistryItemSnapshot({
'active': True,
'breaks_in_ha_version': '2026.6.0',
'created': <ANY>,
'data': None,
'dismissed_version': None,
'domain': 'apcupsd',
'is_fixable': False,
'is_persistent': False,
'issue_domain': None,
'issue_id': 'available_via_device_info_sensor.myups_name',
'learn_more_url': None,
'severity': <IssueSeverity.WARNING: 'warning'>,
'translation_key': 'available_via_device_info',
'translation_placeholders': dict({
'available_via_device_attr': 'name',
'device_id': '<ANY>',
'entity_id': 'sensor.myups_name',
'entity_name': 'Name',
'items': '''
- [APC UPS automation (upsname)](/config/automation/edit/apcupsd_auto_upsname)
- [APC UPS script (upsname)](/config/script/edit/apcupsd_script_upsname)
''',
}),
})
# ---
# name: test_deprecated_sensor_issue[version-available_via_device_info]
IssueRegistryItemSnapshot({
'active': True,
'breaks_in_ha_version': '2026.6.0',
'created': <ANY>,
'data': None,
'dismissed_version': None,
'domain': 'apcupsd',
'is_fixable': False,
'is_persistent': False,
'issue_domain': None,
'issue_id': 'available_via_device_info_sensor.myups_daemon_version',
'learn_more_url': None,
'severity': <IssueSeverity.WARNING: 'warning'>,
'translation_key': 'available_via_device_info',
'translation_placeholders': dict({
'available_via_device_attr': 'sw_version',
'device_id': '<ANY>',
'entity_id': 'sensor.myups_daemon_version',
'entity_name': 'Daemon version',
'items': '''
- [APC UPS automation (version)](/config/automation/edit/apcupsd_auto_version)
- [APC UPS script (version)](/config/script/edit/apcupsd_script_version)
''',
}),
})
# ---
# name: test_sensor[sensor.myups_alarm_delay-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -6,7 +6,8 @@ from unittest.mock import AsyncMock
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.apcupsd.const import DOMAIN
from homeassistant.components import automation, script
from homeassistant.components.apcupsd.const import DEPRECATED_SENSORS, DOMAIN
from homeassistant.components.apcupsd.coordinator import REQUEST_REFRESH_COOLDOWN
from homeassistant.const import (
ATTR_ENTITY_ID,
@@ -15,7 +16,11 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers import (
device_registry as dr,
entity_registry as er,
issue_registry as ir,
)
from homeassistant.setup import async_setup_component
from homeassistant.util import slugify
from homeassistant.util.dt import utcnow
@@ -161,3 +166,76 @@ async def test_sensor_unknown(
await hass.async_block_till_done()
# The state should become unknown again.
assert hass.states.get(last_self_test_id).state == STATE_UNKNOWN
@pytest.mark.parametrize(("entity_key", "issue_key"), DEPRECATED_SENSORS.items())
async def test_deprecated_sensor_issue(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_request_status: AsyncMock,
entity_registry: er.EntityRegistry,
snapshot: SnapshotAssertion,
entity_key: str,
issue_key: str,
) -> None:
"""Ensure the issue lists automations and scripts referencing a deprecated sensor."""
issue_registry = ir.async_get(hass)
unique_id = f"{mock_request_status.return_value['SERIALNO']}_{entity_key}"
entity_id = entity_registry.async_get_entity_id("sensor", DOMAIN, unique_id)
assert entity_id
# No issue yet.
issue_id = f"{issue_key}_{entity_id}"
assert issue_registry.async_get_issue(DOMAIN, issue_id) is None
# Add automations and scripts referencing the deprecated sensor.
entity_slug = slugify(entity_key)
automation_object_id = f"apcupsd_auto_{entity_slug}"
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: {
"id": automation_object_id,
"alias": f"APC UPS automation ({entity_key})",
"trigger": {"platform": "state", "entity_id": entity_id},
"action": {
"action": "automation.turn_on",
"target": {"entity_id": f"automation.{automation_object_id}"},
},
}
},
)
assert await async_setup_component(
hass,
script.DOMAIN,
{
script.DOMAIN: {
f"apcupsd_script_{entity_slug}": {
"alias": f"APC UPS script ({entity_key})",
"sequence": [
{
"condition": "state",
"entity_id": entity_id,
"state": "on",
}
],
}
}
},
)
await hass.config_entries.async_reload(mock_config_entry.entry_id)
await hass.async_block_till_done()
issue = issue_registry.async_get_issue(DOMAIN, issue_id)
# Redact the device ID in the placeholder for consistency.
issue.translation_placeholders["device_id"] = "<ANY>"
assert issue == snapshot
await hass.config_entries.async_unload(mock_config_entry.entry_id)
await hass.async_block_till_done()
# Assert the issue is no longer present.
assert not issue_registry.async_get_issue(DOMAIN, issue_id)
assert len(issue_registry.issues) == 0