mirror of
https://github.com/home-assistant/core.git
synced 2025-11-10 03:19:34 +00:00
* Add default device class display precision for Sensor
* Renaming, docstrings, cleanup
* Simplify units list
* Fix tests
* Fix missing precision when suggested is specified
* Update snapshots
* Fix when unit of measurement is not valid
* Fix tests
* Fix deprecated unit usage
* Fix goalzero tests — the sensor native_value method was accessing the data dict and throwing, since the mock did not have any data for the sensors. Since the precision is now always specified (it was missing for those sensors), the throw was hitting async_update_entity_options in _update_suggested_precision. Previously, async_update_entity_options was not called since it had no precision.
* Fix metoffice
* Fix smartthings
* Add default sensor data for Tesla Wall Connector tests
* Update snapshots
* Revert spaces
* Update smartthings snapshots
* Add missing sensor mock for tesla wall connector
* Address review comments
* Add doc comment
* Add cap to doc comment
* Update comment
* Update snapshots
* Update comment
89 lines
2.8 KiB
Python
"""Test the NZBGet sensors."""
|
|
|
|
from datetime import timedelta
|
|
from unittest.mock import patch
|
|
|
|
import pytest
|
|
|
|
from homeassistant.components.sensor import SensorDeviceClass
|
|
from homeassistant.const import (
|
|
ATTR_UNIT_OF_MEASUREMENT,
|
|
UnitOfDataRate,
|
|
UnitOfInformation,
|
|
)
|
|
from homeassistant.core import HomeAssistant
|
|
from homeassistant.helpers import entity_registry as er
|
|
from homeassistant.util import dt as dt_util
|
|
|
|
from . import init_integration
|
|
|
|
|
|
@pytest.mark.usefixtures("nzbget_api")
|
|
async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None:
|
|
"""Test the creation and values of the sensors."""
|
|
now = dt_util.utcnow().replace(microsecond=0)
|
|
with patch("homeassistant.components.nzbget.sensor.utcnow", return_value=now):
|
|
entry = await init_integration(hass)
|
|
|
|
uptime = now - timedelta(seconds=600)
|
|
|
|
sensors = {
|
|
"article_cache": (
|
|
"ArticleCacheMB",
|
|
"64",
|
|
UnitOfInformation.MEGABYTES,
|
|
SensorDeviceClass.DATA_SIZE,
|
|
),
|
|
"average_speed": (
|
|
"AverageDownloadRate",
|
|
"1.25",
|
|
UnitOfDataRate.MEGABYTES_PER_SECOND,
|
|
SensorDeviceClass.DATA_RATE,
|
|
),
|
|
"download_paused": ("DownloadPaused", "False", None, None),
|
|
"speed": (
|
|
"DownloadRate",
|
|
"2.5",
|
|
UnitOfDataRate.MEGABYTES_PER_SECOND,
|
|
SensorDeviceClass.DATA_RATE,
|
|
),
|
|
"size": (
|
|
"DownloadedSizeMB",
|
|
"256",
|
|
UnitOfInformation.MEGABYTES,
|
|
SensorDeviceClass.DATA_SIZE,
|
|
),
|
|
"disk_free": (
|
|
"FreeDiskSpaceMB",
|
|
"1024",
|
|
UnitOfInformation.MEGABYTES,
|
|
SensorDeviceClass.DATA_SIZE,
|
|
),
|
|
"post_processing_jobs": ("PostJobCount", "2", "Jobs", None),
|
|
"post_processing_paused": ("PostPaused", "False", None, None),
|
|
"queue_size": (
|
|
"RemainingSizeMB",
|
|
"512",
|
|
UnitOfInformation.MEGABYTES,
|
|
SensorDeviceClass.DATA_SIZE,
|
|
),
|
|
"uptime": ("UpTimeSec", uptime.isoformat(), None, SensorDeviceClass.TIMESTAMP),
|
|
"speed_limit": (
|
|
"DownloadLimit",
|
|
"1.0",
|
|
UnitOfDataRate.MEGABYTES_PER_SECOND,
|
|
SensorDeviceClass.DATA_RATE,
|
|
),
|
|
}
|
|
|
|
for sensor_id, data in sensors.items():
|
|
entity_entry = entity_registry.async_get(f"sensor.nzbgettest_{sensor_id}")
|
|
assert entity_entry
|
|
assert entity_entry.original_device_class == data[3]
|
|
assert entity_entry.unique_id == f"{entry.entry_id}_{data[0]}"
|
|
|
|
state = hass.states.get(f"sensor.nzbgettest_{sensor_id}")
|
|
assert state
|
|
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == data[2]
|
|
assert state.state == data[1]
|