From 722aa0895ef0ba46981e96e40ca9d244d4b31f91 Mon Sep 17 00:00:00 2001
From: Erik Montnemery
Date: Mon, 30 Aug 2021 12:51:46 +0200
Subject: [PATCH] Improve statistics error messages when sensor's unit is
 changing (#55436)

* Improve error messages when sensor's unit is changing

* Improve test coverage
---
 homeassistant/components/sensor/recorder.py | 13 +++-
 tests/components/sensor/test_recorder.py    | 85 +++++++++++++++++++++
 2 files changed, 95 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/sensor/recorder.py b/homeassistant/components/sensor/recorder.py
index 2b59592dd17..6ab75f88dbd 100644
--- a/homeassistant/components/sensor/recorder.py
+++ b/homeassistant/components/sensor/recorder.py
@@ -200,11 +200,18 @@ def _normalize_states(
                     hass.data[WARN_UNSTABLE_UNIT] = set()
                 if entity_id not in hass.data[WARN_UNSTABLE_UNIT]:
                     hass.data[WARN_UNSTABLE_UNIT].add(entity_id)
+                    extra = ""
+                    if old_metadata := statistics.get_metadata(hass, entity_id):
+                        extra = (
+                            " and matches the unit of already compiled statistics "
+                            f"({old_metadata['unit_of_measurement']})"
+                        )
                     _LOGGER.warning(
-                        "The unit of %s is changing, got %s, generation of long term "
-                        "statistics will be suppressed unless the unit is stable",
+                        "The unit of %s is changing, got multiple %s, generation of long term "
+                        "statistics will be suppressed unless the unit is stable%s",
                         entity_id,
                         all_units,
+                        extra,
                     )
                 return None, []
             unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT)
@@ -320,7 +327,7 @@ def compile_statistics(
                         entity_id,
                         unit,
                         old_metadata["unit_of_measurement"],
-                        unit,
+                        old_metadata["unit_of_measurement"],
                     )
                 continue
 
diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py
index 2e300b9c748..6c4c899eb14 100644
--- a/tests/components/sensor/test_recorder.py
+++ b/tests/components/sensor/test_recorder.py
@@ -1028,6 +1028,7 @@ def test_compile_hourly_statistics_changing_units_2(
     recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(minutes=30))
     wait_recording_done(hass)
     assert "The unit of sensor.test1 is changing" in caplog.text
+    assert "and matches the unit of already compiled statistics" not in caplog.text
     statistic_ids = list_statistic_ids(hass)
     assert statistic_ids == [
         {"statistic_id": "sensor.test1", "unit_of_measurement": "cats"}
@@ -1038,6 +1039,90 @@ def test_compile_hourly_statistics_changing_units_2(
     assert "Error while processing event StatisticsTask" not in caplog.text
 
 
+@pytest.mark.parametrize(
+    "device_class,unit,native_unit,mean,min,max",
+    [
+        (None, None, None, 16.440677, 10, 30),
+        (None, "%", "%", 16.440677, 10, 30),
+        ("battery", "%", "%", 16.440677, 10, 30),
+        ("battery", None, None, 16.440677, 10, 30),
+    ],
+)
+def test_compile_hourly_statistics_changing_units_3(
+    hass_recorder, caplog, device_class, unit, native_unit, mean, min, max
+):
+    """Test compiling hourly statistics where units change from one hour to the next."""
+    zero = dt_util.utcnow()
+    hass = hass_recorder()
+    recorder = hass.data[DATA_INSTANCE]
+    setup_component(hass, "sensor", {})
+    attributes = {
+        "device_class": device_class,
+        "state_class": "measurement",
+        "unit_of_measurement": unit,
+    }
+    four, states = record_states(hass, zero, "sensor.test1", attributes)
+    four, _states = record_states(
+        hass, zero + timedelta(hours=1), "sensor.test1", attributes
+    )
+    states["sensor.test1"] += _states["sensor.test1"]
+    attributes["unit_of_measurement"] = "cats"
+    four, _states = record_states(
+        hass, zero + timedelta(hours=2), "sensor.test1", attributes
+    )
+    states["sensor.test1"] += _states["sensor.test1"]
+    hist = history.get_significant_states(hass, zero, four)
+    assert dict(states) == dict(hist)
+
+    recorder.do_adhoc_statistics(period="hourly", start=zero)
+    wait_recording_done(hass)
+    assert "does not match the unit of already compiled" not in caplog.text
+    statistic_ids = list_statistic_ids(hass)
+    assert statistic_ids == [
+        {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit}
+    ]
+    stats = statistics_during_period(hass, zero)
+    assert stats == {
+        "sensor.test1": [
+            {
+                "statistic_id": "sensor.test1",
+                "start": process_timestamp_to_utc_isoformat(zero),
+                "mean": approx(mean),
+                "min": approx(min),
+                "max": approx(max),
+                "last_reset": None,
+                "state": None,
+                "sum": None,
+            }
+        ]
+    }
+
+    recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2))
+    wait_recording_done(hass)
+    assert "The unit of sensor.test1 is changing" in caplog.text
+    assert f"matches the unit of already compiled statistics ({unit})" in caplog.text
+    statistic_ids = list_statistic_ids(hass)
+    assert statistic_ids == [
+        {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit}
+    ]
+    stats = statistics_during_period(hass, zero)
+    assert stats == {
+        "sensor.test1": [
+            {
+                "statistic_id": "sensor.test1",
+                "start": process_timestamp_to_utc_isoformat(zero),
+                "mean": approx(mean),
+                "min": approx(min),
+                "max": approx(max),
+                "last_reset": None,
+                "state": None,
+                "sum": None,
+            }
+        ]
+    }
+    assert "Error while processing event StatisticsTask" not in caplog.text
+
+
 @pytest.mark.parametrize(
     "device_class,unit,native_unit,mean,min,max",
     [