From 92afbf01e7535759b53fc467b31469f1155f96cf Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 4 Oct 2022 03:50:05 +0200 Subject: [PATCH 1/7] Simplify long term statistics by always supporting unit conversion (#79557) --- homeassistant/components/sensor/recorder.py | 186 +++------ tests/components/sensor/test_recorder.py | 441 +++++++++++--------- 2 files changed, 308 insertions(+), 319 deletions(-) diff --git a/homeassistant/components/sensor/recorder.py b/homeassistant/components/sensor/recorder.py index 144502dd81a..1a72444c758 100644 --- a/homeassistant/components/sensor/recorder.py +++ b/homeassistant/components/sensor/recorder.py @@ -23,22 +23,11 @@ from homeassistant.components.recorder.models import ( StatisticMetaData, StatisticResult, ) -from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_UNIT_OF_MEASUREMENT +from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity import entity_sources from homeassistant.util import dt as dt_util -from homeassistant.util.unit_conversion import ( - BaseUnitConverter, - DistanceConverter, - EnergyConverter, - MassConverter, - PowerConverter, - PressureConverter, - SpeedConverter, - TemperatureConverter, - VolumeConverter, -) from . import ( ATTR_LAST_RESET, @@ -48,7 +37,6 @@ from . import ( STATE_CLASS_TOTAL, STATE_CLASS_TOTAL_INCREASING, STATE_CLASSES, - SensorDeviceClass, ) _LOGGER = logging.getLogger(__name__) @@ -59,18 +47,6 @@ DEFAULT_STATISTICS = { STATE_CLASS_TOTAL_INCREASING: {"sum"}, } -UNIT_CONVERTERS: dict[str, type[BaseUnitConverter]] = { - SensorDeviceClass.DISTANCE: DistanceConverter, - SensorDeviceClass.ENERGY: EnergyConverter, - SensorDeviceClass.GAS: VolumeConverter, - SensorDeviceClass.POWER: PowerConverter, - SensorDeviceClass.PRESSURE: PressureConverter, - SensorDeviceClass.SPEED: SpeedConverter, - SensorDeviceClass.TEMPERATURE: TemperatureConverter, - SensorDeviceClass.VOLUME: VolumeConverter, - SensorDeviceClass.WEIGHT: MassConverter, -} - # Keep track of entities for which a warning about decreasing value has been logged SEEN_DIP = "sensor_seen_total_increasing_dip" WARN_DIP = "sensor_warn_total_increasing_dip" @@ -154,84 +130,84 @@ def _normalize_states( session: Session, old_metadatas: dict[str, tuple[int, StatisticMetaData]], entity_history: Iterable[State], - device_class: str | None, entity_id: str, ) -> tuple[str | None, str | None, list[tuple[float, State]]]: """Normalize units.""" old_metadata = old_metadatas[entity_id][1] if entity_id in old_metadatas else None state_unit: str | None = None - if device_class not in UNIT_CONVERTERS or ( + fstates: list[tuple[float, State]] = [] + for state in entity_history: + try: + fstate = _parse_float(state.state) + except (ValueError, TypeError): # TypeError to guard for NULL state in DB + continue + fstates.append((fstate, state)) + + if not fstates: + return None, None, fstates + + state_unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT) + + if state_unit not in statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER or ( old_metadata and old_metadata["unit_of_measurement"] - not in UNIT_CONVERTERS[device_class].VALID_UNITS + not in statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER ): # We're either not normalizing this device class or this entity is not stored - # in a supported unit, return the states as they are - fstates = [] - for state in entity_history: - try: - fstate = _parse_float(state.state) - except (ValueError, 
TypeError): # TypeError to guard for NULL state in DB - continue - fstates.append((fstate, state)) + # in a unit which can be converted, return the states as they are - if fstates: - all_units = _get_units(fstates) - if len(all_units) > 1: - if WARN_UNSTABLE_UNIT not in hass.data: - hass.data[WARN_UNSTABLE_UNIT] = set() - if entity_id not in hass.data[WARN_UNSTABLE_UNIT]: - hass.data[WARN_UNSTABLE_UNIT].add(entity_id) - extra = "" - if old_metadata: - extra = ( - " and matches the unit of already compiled statistics " - f"({old_metadata['unit_of_measurement']})" - ) - _LOGGER.warning( - "The unit of %s is changing, got multiple %s, generation of long term " - "statistics will be suppressed unless the unit is stable%s. " - "Go to %s to fix this", - entity_id, - all_units, - extra, - LINK_DEV_STATISTICS, + all_units = _get_units(fstates) + if len(all_units) > 1: + if WARN_UNSTABLE_UNIT not in hass.data: + hass.data[WARN_UNSTABLE_UNIT] = set() + if entity_id not in hass.data[WARN_UNSTABLE_UNIT]: + hass.data[WARN_UNSTABLE_UNIT].add(entity_id) + extra = "" + if old_metadata: + extra = ( + " and matches the unit of already compiled statistics " + f"({old_metadata['unit_of_measurement']})" ) - return None, None, [] - state_unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT) + _LOGGER.warning( + "The unit of %s is changing, got multiple %s, generation of long term " + "statistics will be suppressed unless the unit is stable%s. " + "Go to %s to fix this", + entity_id, + all_units, + extra, + LINK_DEV_STATISTICS, + ) + return None, None, [] + state_unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT) return state_unit, state_unit, fstates - converter = UNIT_CONVERTERS[device_class] - fstates = [] + converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER[state_unit] + valid_fstates: list[tuple[float, State]] = [] statistics_unit: str | None = None if old_metadata: statistics_unit = old_metadata["unit_of_measurement"] - for state in entity_history: - try: - fstate = _parse_float(state.state) - except ValueError: - continue + for fstate, state in fstates: state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - # Exclude unsupported units from statistics + # Exclude states with unsupported unit from statistics if state_unit not in converter.VALID_UNITS: if WARN_UNSUPPORTED_UNIT not in hass.data: hass.data[WARN_UNSUPPORTED_UNIT] = set() if entity_id not in hass.data[WARN_UNSUPPORTED_UNIT]: hass.data[WARN_UNSUPPORTED_UNIT].add(entity_id) _LOGGER.warning( - "%s has unit %s which is unsupported for device_class %s", + "%s has unit %s which can't be converted to %s", entity_id, state_unit, - device_class, + statistics_unit, ) continue if statistics_unit is None: statistics_unit = state_unit - fstates.append( + valid_fstates.append( ( converter.convert( fstate, from_unit=state_unit, to_unit=statistics_unit @@ -240,7 +216,7 @@ def _normalize_states( ) ) - return statistics_unit, state_unit, fstates + return statistics_unit, state_unit, valid_fstates def _suggest_report_issue(hass: HomeAssistant, entity_id: str) -> str: @@ -427,14 +403,12 @@ def _compile_statistics( # noqa: C901 if entity_id not in history_list: continue - device_class = _state.attributes.get(ATTR_DEVICE_CLASS) entity_history = history_list[entity_id] statistics_unit, state_unit, fstates = _normalize_states( hass, session, old_metadatas, entity_history, - device_class, entity_id, ) @@ -467,11 +441,11 @@ def _compile_statistics( # noqa: C901 if entity_id not in hass.data[WARN_UNSTABLE_UNIT]: 
hass.data[WARN_UNSTABLE_UNIT].add(entity_id) _LOGGER.warning( - "The %sunit of %s (%s) does not match the unit of already " + "The unit of %s (%s) can not be converted to the unit of previously " "compiled statistics (%s). Generation of long term statistics " - "will be suppressed unless the unit changes back to %s. " + "will be suppressed unless the unit changes back to %s or a " + "compatible unit. " "Go to %s to fix this", - "normalized " if device_class in UNIT_CONVERTERS else "", entity_id, statistics_unit, old_metadata[1]["unit_of_measurement"], @@ -603,7 +577,6 @@ def list_statistic_ids( for state in entities: state_class = state.attributes[ATTR_STATE_CLASS] - device_class = state.attributes.get(ATTR_DEVICE_CLASS) state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) provided_statistics = DEFAULT_STATISTICS[state_class] @@ -620,21 +593,6 @@ def list_statistic_ids( ): continue - if device_class not in UNIT_CONVERTERS: - result[state.entity_id] = { - "has_mean": "mean" in provided_statistics, - "has_sum": "sum" in provided_statistics, - "name": None, - "source": RECORDER_DOMAIN, - "statistic_id": state.entity_id, - "unit_of_measurement": state_unit, - } - continue - - converter = UNIT_CONVERTERS[device_class] - if state_unit not in converter.VALID_UNITS: - continue - result[state.entity_id] = { "has_mean": "mean" in provided_statistics, "has_sum": "sum" in provided_statistics, @@ -643,6 +601,7 @@ def list_statistic_ids( "statistic_id": state.entity_id, "unit_of_measurement": state_unit, } + continue return result @@ -660,7 +619,6 @@ def validate_statistics( for state in sensor_states: entity_id = state.entity_id - device_class = state.attributes.get(ATTR_DEVICE_CLASS) state_class = state.attributes.get(ATTR_STATE_CLASS) state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) @@ -684,35 +642,30 @@ def validate_statistics( ) metadata_unit = metadata[1]["unit_of_measurement"] - if device_class not in UNIT_CONVERTERS: + converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER.get(metadata_unit) + if not converter: if state_unit != metadata_unit: - # The unit has changed - issue_type = ( - "units_changed_can_convert" - if statistics.can_convert_units(metadata_unit, state_unit) - else "units_changed" - ) + # The unit has changed, and it's not possible to convert validation_result[entity_id].append( statistics.ValidationIssue( - issue_type, + "units_changed", { "statistic_id": entity_id, "state_unit": state_unit, "metadata_unit": metadata_unit, + "supported_unit": metadata_unit, }, ) ) - elif metadata_unit not in UNIT_CONVERTERS[device_class].VALID_UNITS: - # The unit in metadata is not supported for this device class - valid_units = ", ".join( - sorted(UNIT_CONVERTERS[device_class].VALID_UNITS) - ) + elif state_unit not in converter.VALID_UNITS: + # The state unit can't be converted to the unit in metadata + valid_units = ", ".join(sorted(converter.VALID_UNITS)) validation_result[entity_id].append( statistics.ValidationIssue( - "unsupported_unit_metadata", + "units_changed", { "statistic_id": entity_id, - "device_class": device_class, + "state_unit": state_unit, "metadata_unit": metadata_unit, "supported_unit": valid_units, }, @@ -728,23 +681,6 @@ def validate_statistics( ) ) - if ( - state_class in STATE_CLASSES - and device_class in UNIT_CONVERTERS - and state_unit not in UNIT_CONVERTERS[device_class].VALID_UNITS - ): - # The unit in the state is not supported for this device class - validation_result[entity_id].append( - statistics.ValidationIssue( - "unsupported_unit_state", - 
{ - "statistic_id": entity_id, - "device_class": device_class, - "state_unit": state_unit, - }, - ) - ) - for statistic_id in sensor_statistic_ids - sensor_entity_ids: # There is no sensor matching the statistics_id validation_result[statistic_id].append( diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 99aa3a3bf8e..8d9e34d005f 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -238,8 +238,8 @@ def test_compile_hourly_statistics_purged_state_changes( @pytest.mark.parametrize("attributes", [TEMPERATURE_SENSOR_ATTRIBUTES]) -def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes): - """Test compiling hourly statistics for unsupported sensor.""" +def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes): + """Test compiling hourly statistics for sensor with unit not matching device class.""" zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) @@ -286,6 +286,24 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes "statistics_unit_of_measurement": "°C", "unit_class": "temperature", }, + { + "has_mean": True, + "has_sum": False, + "name": None, + "source": "recorder", + "statistic_id": "sensor.test2", + "statistics_unit_of_measurement": "invalid", + "unit_class": None, + }, + { + "has_mean": True, + "has_sum": False, + "name": None, + "source": "recorder", + "statistic_id": "sensor.test3", + "statistics_unit_of_measurement": None, + "unit_class": None, + }, { "statistic_id": "sensor.test6", "has_mean": True, @@ -320,6 +338,32 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes "sum": None, } ], + "sensor.test2": [ + { + "statistic_id": "sensor.test2", + "start": process_timestamp_to_utc_isoformat(zero), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), + "mean": 13.05084745762712, + "min": -10.0, + "max": 30.0, + "last_reset": None, + "state": None, + "sum": None, + } + ], + "sensor.test3": [ + { + "statistic_id": "sensor.test3", + "start": process_timestamp_to_utc_isoformat(zero), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), + "mean": 13.05084745762712, + "min": -10.0, + "max": 30.0, + "last_reset": None, + "state": None, + "sum": None, + } + ], "sensor.test6": [ { "statistic_id": "sensor.test6", @@ -835,32 +879,44 @@ def test_compile_hourly_sum_statistics_nan_inf_state( @pytest.mark.parametrize( - "entity_id,warning_1,warning_2", + "entity_id, device_class, state_unit, display_unit, statistics_unit, unit_class, offset, warning_1, warning_2", [ ( "sensor.test1", + "energy", + "kWh", + "kWh", + "kWh", + "energy", + 0, "", "bug report at https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue", ), ( "sensor.power_consumption", + "power", + "W", + "W", + "W", + "power", + 15, "from integration demo ", "bug report at https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+demo%22", ), ( "sensor.custom_sensor", + "energy", + "kWh", + "kWh", + "kWh", + "energy", + 0, "from integration test ", "report it to the custom integration author", ), ], ) @pytest.mark.parametrize("state_class", ["total_increasing"]) -@pytest.mark.parametrize( - "device_class, state_unit, display_unit, statistics_unit, unit_class, factor", - [ - ("energy", "kWh", "kWh", "kWh", "energy", 1), - ], -) def test_compile_hourly_sum_statistics_negative_state( hass_recorder, caplog, @@ 
-873,7 +929,7 @@ def test_compile_hourly_sum_statistics_negative_state( display_unit, statistics_unit, unit_class, - factor, + offset, ): """Test compiling hourly statistics with negative states.""" zero = dt_util.utcnow() @@ -938,8 +994,8 @@ def test_compile_hourly_sum_statistics_negative_state( "mean": None, "min": None, "last_reset": None, - "state": approx(factor * seq[7]), - "sum": approx(factor * 15), # (15 - 10) + (10 - 0) + "state": approx(seq[7]), + "sum": approx(offset + 15), # (20 - 15) + (10 - 0) }, ] assert "Error while processing event StatisticsTask" not in caplog.text @@ -1889,7 +1945,7 @@ def test_compile_hourly_statistics_changing_units_1( do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) - assert "does not match the unit of already compiled" not in caplog.text + assert "can not be converted to the unit of previously" not in caplog.text statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { @@ -1922,8 +1978,8 @@ def test_compile_hourly_statistics_changing_units_1( do_adhoc_statistics(hass, start=zero + timedelta(minutes=10)) wait_recording_done(hass) assert ( - "The unit of sensor.test1 (cats) does not match the unit of already compiled " - f"statistics ({display_unit})" in caplog.text + "The unit of sensor.test1 (cats) can not be converted to the unit of " + f"previously compiled statistics ({display_unit})" in caplog.text ) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ @@ -3039,18 +3095,30 @@ def record_states(hass, zero, entity_id, attributes, seq=None): @pytest.mark.parametrize( - "units, attributes, unit", + "units, attributes, unit, unit2, supported_unit", [ - (IMPERIAL_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W"), - (IMPERIAL_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, "°F"), - (METRIC_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, "°C"), - (IMPERIAL_SYSTEM, PRESSURE_SENSOR_ATTRIBUTES, "psi"), - (METRIC_SYSTEM, PRESSURE_SENSOR_ATTRIBUTES, "Pa"), + (IMPERIAL_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (IMPERIAL_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, "°F", "K", "K, °C, °F"), + (METRIC_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, "°C", "K", "K, °C, °F"), + ( + IMPERIAL_SYSTEM, + PRESSURE_SENSOR_ATTRIBUTES, + "psi", + "bar", + "Pa, bar, cbar, hPa, inHg, kPa, mbar, mmHg, psi", + ), + ( + METRIC_SYSTEM, + PRESSURE_SENSOR_ATTRIBUTES, + "Pa", + "bar", + "Pa, bar, cbar, hPa, inHg, kPa, mbar, mmHg, psi", + ), ], ) -async def test_validate_statistics_supported_device_class( - hass, hass_ws_client, recorder_mock, units, attributes, unit +async def test_validate_statistics_unit_change_device_class( + hass, hass_ws_client, recorder_mock, units, attributes, unit, unit2, supported_unit ): """Test validate_statistics.""" id = 1 @@ -3078,39 +3146,40 @@ async def test_validate_statistics_supported_device_class( # No statistics, no state - empty response await assert_validation_result(client, {}) - # No statistics, valid state - empty response + # No statistics, unit in state matching device class - empty response hass.states.async_set( "sensor.test", 10, attributes={**attributes, **{"unit_of_measurement": unit}} ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) - # No statistics, invalid state - expect error + # No statistics, unit in state not matching device class - empty response hass.states.async_set( "sensor.test", 11, attributes={**attributes, **{"unit_of_measurement": "dogs"}} ) await 
async_recorder_block_till_done(hass) - expected = { - "sensor.test": [ - { - "data": { - "device_class": attributes["device_class"], - "state_unit": "dogs", - "statistic_id": "sensor.test", - }, - "type": "unsupported_unit_state", - } - ], - } - await assert_validation_result(client, expected) + await assert_validation_result(client, {}) - # Statistics has run, invalid state - expect error + # Statistics has run, incompatible unit - expect error await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now) hass.states.async_set( "sensor.test", 12, attributes={**attributes, **{"unit_of_measurement": "dogs"}} ) await async_recorder_block_till_done(hass) + expected = { + "sensor.test": [ + { + "data": { + "metadata_unit": unit, + "state_unit": "dogs", + "statistic_id": "sensor.test", + "supported_unit": supported_unit, + }, + "type": "units_changed", + } + ], + } await assert_validation_result(client, expected) # Valid state - empty response @@ -3125,6 +3194,18 @@ async def test_validate_statistics_supported_device_class( await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) + # Valid state in compatible unit - empty response + hass.states.async_set( + "sensor.test", 13, attributes={**attributes, **{"unit_of_measurement": unit2}} + ) + await async_recorder_block_till_done(hass) + await assert_validation_result(client, {}) + + # Valid state, statistic runs again - empty response + do_adhoc_statistics(hass, start=now) + await async_recorder_block_till_done(hass) + await assert_validation_result(client, {}) + # Remove the state - empty response hass.states.async_remove("sensor.test") expected = { @@ -3144,7 +3225,7 @@ async def test_validate_statistics_supported_device_class( (IMPERIAL_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W, kW"), ], ) -async def test_validate_statistics_supported_device_class_2( +async def test_validate_statistics_unit_change_device_class_2( hass, hass_ws_client, recorder_mock, units, attributes, valid_units ): """Test validate_statistics.""" @@ -3173,56 +3254,144 @@ async def test_validate_statistics_supported_device_class_2( # No statistics, no state - empty response await assert_validation_result(client, {}) - # No statistics, valid state - empty response - initial_attributes = {"state_class": "measurement"} + # No statistics, no device class - empty response + initial_attributes = {"state_class": "measurement", "unit_of_measurement": "dogs"} hass.states.async_set("sensor.test", 10, attributes=initial_attributes) await hass.async_block_till_done() await assert_validation_result(client, {}) - # Statistics has run, device class set - expect error + # Statistics has run, device class set not matching unit - empty response do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", 12, attributes=attributes) - await hass.async_block_till_done() - expected = { - "sensor.test": [ - { - "data": { - "device_class": attributes["device_class"], - "metadata_unit": None, - "statistic_id": "sensor.test", - "supported_unit": valid_units, - }, - "type": "unsupported_unit_metadata", - } - ], - } - await assert_validation_result(client, expected) - - # Invalid state too, expect double errors hass.states.async_set( - "sensor.test", 13, attributes={**attributes, **{"unit_of_measurement": "dogs"}} + "sensor.test", 12, attributes={**attributes, **{"unit_of_measurement": "dogs"}} + ) + await hass.async_block_till_done() + await assert_validation_result(client, {}) + + 
+@pytest.mark.parametrize( + "units, attributes, unit, unit2, supported_unit", + [ + (IMPERIAL_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (IMPERIAL_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, "°F", "K", "K, °C, °F"), + (METRIC_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, "°C", "K", "K, °C, °F"), + ( + IMPERIAL_SYSTEM, + PRESSURE_SENSOR_ATTRIBUTES, + "psi", + "bar", + "Pa, bar, cbar, hPa, inHg, kPa, mbar, mmHg, psi", + ), + ( + METRIC_SYSTEM, + PRESSURE_SENSOR_ATTRIBUTES, + "Pa", + "bar", + "Pa, bar, cbar, hPa, inHg, kPa, mbar, mmHg, psi", + ), + ], +) +async def test_validate_statistics_unit_change_no_device_class( + hass, hass_ws_client, recorder_mock, units, attributes, unit, unit2, supported_unit +): + """Test validate_statistics.""" + id = 1 + attributes = dict(attributes) + attributes.pop("device_class") + + def next_id(): + nonlocal id + id += 1 + return id + + async def assert_validation_result(client, expected_result): + await client.send_json( + {"id": next_id(), "type": "recorder/validate_statistics"} + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == expected_result + + now = dt_util.utcnow() + + hass.config.units = units + await async_setup_component(hass, "sensor", {}) + await async_recorder_block_till_done(hass) + client = await hass_ws_client() + + # No statistics, no state - empty response + await assert_validation_result(client, {}) + + # No statistics, unit in state matching device class - empty response + hass.states.async_set( + "sensor.test", 10, attributes={**attributes, **{"unit_of_measurement": unit}} + ) + await async_recorder_block_till_done(hass) + await assert_validation_result(client, {}) + + # No statistics, unit in state not matching device class - empty response + hass.states.async_set( + "sensor.test", 11, attributes={**attributes, **{"unit_of_measurement": "dogs"}} + ) + await async_recorder_block_till_done(hass) + await assert_validation_result(client, {}) + + # Statistics has run, incompatible unit - expect error + await async_recorder_block_till_done(hass) + do_adhoc_statistics(hass, start=now) + hass.states.async_set( + "sensor.test", 12, attributes={**attributes, **{"unit_of_measurement": "dogs"}} ) await async_recorder_block_till_done(hass) expected = { "sensor.test": [ { "data": { - "device_class": attributes["device_class"], - "metadata_unit": None, - "statistic_id": "sensor.test", - "supported_unit": valid_units, - }, - "type": "unsupported_unit_metadata", - }, - { - "data": { - "device_class": attributes["device_class"], + "metadata_unit": unit, "state_unit": "dogs", "statistic_id": "sensor.test", + "supported_unit": supported_unit, }, - "type": "unsupported_unit_state", - }, + "type": "units_changed", + } + ], + } + await assert_validation_result(client, expected) + + # Valid state - empty response + hass.states.async_set( + "sensor.test", 13, attributes={**attributes, **{"unit_of_measurement": unit}} + ) + await async_recorder_block_till_done(hass) + await assert_validation_result(client, {}) + + # Valid state, statistic runs again - empty response + do_adhoc_statistics(hass, start=now) + await async_recorder_block_till_done(hass) + await assert_validation_result(client, {}) + + # Valid state in compatible unit - empty response + hass.states.async_set( + "sensor.test", 13, attributes={**attributes, **{"unit_of_measurement": unit2}} + ) + await async_recorder_block_till_done(hass) + await assert_validation_result(client, {}) + 
+ # Valid state, statistic runs again - empty response + do_adhoc_statistics(hass, start=now) + await async_recorder_block_till_done(hass) + await assert_validation_result(client, {}) + + # Remove the state - empty response + hass.states.async_remove("sensor.test") + expected = { + "sensor.test": [ + { + "data": {"statistic_id": "sensor.test"}, + "type": "no_state", + } ], } await assert_validation_result(client, expected) @@ -3473,7 +3642,7 @@ async def test_validate_statistics_sensor_removed( "attributes", [BATTERY_SENSOR_ATTRIBUTES, NONE_SENSOR_ATTRIBUTES], ) -async def test_validate_statistics_unsupported_device_class( +async def test_validate_statistics_unit_change_no_conversion( hass, recorder_mock, hass_ws_client, attributes ): """Test validate_statistics.""" @@ -3553,6 +3722,7 @@ async def test_validate_statistics_unsupported_device_class( "metadata_unit": "dogs", "state_unit": attributes.get("unit_of_measurement"), "statistic_id": "sensor.test", + "supported_unit": "dogs", }, "type": "units_changed", } @@ -3573,124 +3743,7 @@ async def test_validate_statistics_unsupported_device_class( await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) - # Remove the state - empty response - hass.states.async_remove("sensor.test") - expected = { - "sensor.test": [ - { - "data": {"statistic_id": "sensor.test"}, - "type": "no_state", - } - ], - } - await assert_validation_result(client, expected) - - -@pytest.mark.parametrize( - "attributes", - [KW_SENSOR_ATTRIBUTES], -) -async def test_validate_statistics_unsupported_device_class_2( - hass, recorder_mock, hass_ws_client, attributes -): - """Test validate_statistics.""" - id = 1 - - def next_id(): - nonlocal id - id += 1 - return id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - async def assert_statistic_ids(expected_result): - with session_scope(hass=hass) as session: - db_states = list(session.query(StatisticsMeta)) - assert len(db_states) == len(expected_result) - for i in range(len(db_states)): - assert db_states[i].statistic_id == expected_result[i]["statistic_id"] - assert ( - db_states[i].unit_of_measurement - == expected_result[i]["unit_of_measurement"] - ) - - now = dt_util.utcnow() - - await async_setup_component(hass, "sensor", {}) - await async_recorder_block_till_done(hass) - client = await hass_ws_client() - - # No statistics, no state - empty response - await assert_validation_result(client, {}) - - # No statistics, original unit - empty response - hass.states.async_set("sensor.test", 10, attributes=attributes) - await assert_validation_result(client, {}) - - # No statistics, changed unit - empty response - hass.states.async_set( - "sensor.test", 11, attributes={**attributes, **{"unit_of_measurement": "W"}} - ) - await assert_validation_result(client, {}) - - # Run statistics, no statistics will be generated because of conflicting units - await async_recorder_block_till_done(hass) - do_adhoc_statistics(hass, start=now) - await async_recorder_block_till_done(hass) - await assert_statistic_ids([]) - - # No statistics, changed unit - empty response - hass.states.async_set( - "sensor.test", 12, attributes={**attributes, **{"unit_of_measurement": "W"}} - ) - await assert_validation_result(client, {}) - - # Run statistics one hour later, only the "W" state will be considered - 
await async_recorder_block_till_done(hass) - do_adhoc_statistics(hass, start=now + timedelta(hours=1)) - await async_recorder_block_till_done(hass) - await assert_statistic_ids( - [{"statistic_id": "sensor.test", "unit_of_measurement": "W"}] - ) - await assert_validation_result(client, {}) - - # Change back to original unit - expect error - hass.states.async_set("sensor.test", 13, attributes=attributes) - await async_recorder_block_till_done(hass) - expected = { - "sensor.test": [ - { - "data": { - "metadata_unit": "W", - "state_unit": "kW", - "statistic_id": "sensor.test", - }, - "type": "units_changed_can_convert", - } - ], - } - await assert_validation_result(client, expected) - - # Changed unit - empty response - hass.states.async_set( - "sensor.test", 14, attributes={**attributes, **{"unit_of_measurement": "W"}} - ) - await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) - - # Valid state, statistic runs again - empty response - await async_recorder_block_till_done(hass) - do_adhoc_statistics(hass, start=now) - await async_recorder_block_till_done(hass) - await assert_validation_result(client, {}) - - # Remove the state - empty response + # Remove the state - expect error hass.states.async_remove("sensor.test") expected = { "sensor.test": [ From 5f3774219704c10523bbe6817ec88745a2cc30c8 Mon Sep 17 00:00:00 2001 From: puddly <32534428+puddly@users.noreply.github.com> Date: Thu, 6 Oct 2022 14:02:24 -0400 Subject: [PATCH 2/7] ZHA radio migration: reset the old adapter (#79663) --- homeassistant/components/zha/config_flow.py | 84 ++++++++-- homeassistant/components/zha/strings.json | 16 ++ .../components/zha/translations/en.json | 39 ++--- tests/components/zha/test_config_flow.py | 148 ++++++++++++++---- 4 files changed, 219 insertions(+), 68 deletions(-) diff --git a/homeassistant/components/zha/config_flow.py b/homeassistant/components/zha/config_flow.py index ce2080e4a13..85f03b9f1f5 100644 --- a/homeassistant/components/zha/config_flow.py +++ b/homeassistant/components/zha/config_flow.py @@ -1,6 +1,7 @@ """Config flow for ZHA.""" from __future__ import annotations +import asyncio import collections import contextlib import copy @@ -65,8 +66,16 @@ FORMATION_UPLOAD_MANUAL_BACKUP = "upload_manual_backup" CHOOSE_AUTOMATIC_BACKUP = "choose_automatic_backup" OVERWRITE_COORDINATOR_IEEE = "overwrite_coordinator_ieee" +OPTIONS_INTENT_MIGRATE = "intent_migrate" +OPTIONS_INTENT_RECONFIGURE = "intent_reconfigure" + UPLOADED_BACKUP_FILE = "uploaded_backup_file" +DEFAULT_ZHA_ZEROCONF_PORT = 6638 +ESPHOME_API_PORT = 6053 + +CONNECT_DELAY_S = 1.0 + _LOGGER = logging.getLogger(__name__) @@ -159,6 +168,7 @@ class BaseZhaFlow(FlowHandler): yield app finally: await app.disconnect() + await asyncio.sleep(CONNECT_DELAY_S) async def _restore_backup( self, backup: zigpy.backups.NetworkBackup, **kwargs: Any @@ -628,14 +638,21 @@ class ZhaConfigFlowHandler(BaseZhaFlow, config_entries.ConfigFlow, domain=DOMAIN # Hostname is format: livingroom.local. 
local_name = discovery_info.hostname[:-1] - radio_type = discovery_info.properties.get("radio_type") or local_name + port = discovery_info.port or DEFAULT_ZHA_ZEROCONF_PORT + + # Fix incorrect port for older TubesZB devices + if "tube" in local_name and port == ESPHOME_API_PORT: + port = DEFAULT_ZHA_ZEROCONF_PORT + + if "radio_type" in discovery_info.properties: + self._radio_type = RadioType[discovery_info.properties["radio_type"]] + elif "efr32" in local_name: + self._radio_type = RadioType.ezsp + else: + self._radio_type = RadioType.znp + node_name = local_name[: -len(".local")] - host = discovery_info.host - port = discovery_info.port - if local_name.startswith("tube") or "efr32" in local_name: - # This is hard coded to work with legacy devices - port = 6638 - device_path = f"socket://{host}:{port}" + device_path = f"socket://{discovery_info.host}:{port}" if current_entry := await self.async_set_unique_id(node_name): self._abort_if_unique_id_configured( @@ -651,13 +668,6 @@ class ZhaConfigFlowHandler(BaseZhaFlow, config_entries.ConfigFlow, domain=DOMAIN self._title = device_path self._device_path = device_path - if "efr32" in radio_type: - self._radio_type = RadioType.ezsp - elif "zigate" in radio_type: - self._radio_type = RadioType.zigate - else: - self._radio_type = RadioType.znp - return await self.async_step_confirm() async def async_step_hardware( @@ -720,10 +730,54 @@ class ZhaOptionsFlowHandler(BaseZhaFlow, config_entries.OptionsFlow): # ZHA is not running pass - return await self.async_step_choose_serial_port() + return await self.async_step_prompt_migrate_or_reconfigure() return self.async_show_form(step_id="init") + async def async_step_prompt_migrate_or_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Confirm if we are migrating adapters or just re-configuring.""" + + return self.async_show_menu( + step_id="prompt_migrate_or_reconfigure", + menu_options=[ + OPTIONS_INTENT_RECONFIGURE, + OPTIONS_INTENT_MIGRATE, + ], + ) + + async def async_step_intent_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Virtual step for when the user is reconfiguring the integration.""" + return await self.async_step_choose_serial_port() + + async def async_step_intent_migrate( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Confirm the user wants to reset their current radio.""" + + if user_input is not None: + # Reset the current adapter + async with self._connect_zigpy_app() as app: + await app.reset_network_info() + + return await self.async_step_instruct_unplug() + + return self.async_show_form(step_id="intent_migrate") + + async def async_step_instruct_unplug( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Instruct the user to unplug the current radio, if possible.""" + + if user_input is not None: + # Now that the old radio is gone, we can scan for serial ports again + return await self.async_step_choose_serial_port() + + return self.async_show_form(step_id="instruct_unplug") + async def _async_create_radio_entity(self): """Re-implementation of the base flow's final step to update the config.""" device_settings = self._device_settings.copy() diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index 3901f9f9439..240f3c4ee83 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -76,6 +76,22 @@ "title": "Reconfigure ZHA", "description": "ZHA will be stopped. 
Do you wish to continue?" }, + "prompt_migrate_or_reconfigure": { + "title": "Migrate or re-configure", + "description": "Are you migrating to a new radio or re-configuring the current radio?", + "menu_options": { + "intent_migrate": "Migrate to a new radio", + "intent_reconfigure": "Re-configure the current radio" + } + }, + "intent_migrate": { + "title": "Migrate to a new radio", + "description": "Your old radio will be factory reset. If you are using a combined Z-Wave and Zigbee adapter like the HUSBZB-1, this will only reset the Zigbee portion.\n\nDo you wish to continue?" + }, + "instruct_unplug": { + "title": "Unplug your old radio", + "description": "Your old radio has been reset. If the hardware is no longer needed, you can now unplug it." + }, "choose_serial_port": { "title": "[%key:component::zha::config::step::choose_serial_port::title%]", "data": { diff --git a/homeassistant/components/zha/translations/en.json b/homeassistant/components/zha/translations/en.json index adf89983256..68d36b7fac7 100644 --- a/homeassistant/components/zha/translations/en.json +++ b/homeassistant/components/zha/translations/en.json @@ -64,35 +64,12 @@ "description": "Your backup has a different IEEE address than your radio. For your network to function properly, the IEEE address of your radio should also be changed.\n\nThis is a permanent operation.", "title": "Overwrite Radio IEEE Address" }, - "pick_radio": { - "data": { - "radio_type": "Radio Type" - }, - "description": "Pick a type of your Zigbee radio", - "title": "Radio Type" - }, - "port_config": { - "data": { - "baudrate": "port speed", - "flow_control": "data flow control", - "path": "Serial device path" - }, - "description": "Enter port specific settings", - "title": "Settings" - }, "upload_manual_backup": { "data": { "uploaded_backup_file": "Upload a file" }, "description": "Restore your network settings from an uploaded backup JSON file. You can download one from a different ZHA installation from **Network Settings**, or use a Zigbee2MQTT `coordinator_backup.json` file.", "title": "Upload a Manual Backup" - }, - "user": { - "data": { - "path": "Serial Device Path" - }, - "description": "Select serial port for Zigbee radio", - "title": "ZHA" } } }, @@ -212,6 +189,14 @@ "description": "ZHA will be stopped. Do you wish to continue?", "title": "Reconfigure ZHA" }, + "instruct_unplug": { + "description": "Your old radio has been reset. If the hardware is no longer needed, you can now unplug it.", + "title": "Unplug your old radio" + }, + "intent_migrate": { + "description": "Your old radio will be factory reset. If you are using a combined Z-Wave and Zigbee adapter like the HUSBZB-1, this will only reset the Zigbee portion.\n\nDo you wish to continue?", + "title": "Migrate to a new radio" + }, "manual_pick_radio_type": { "data": { "radio_type": "Radio Type" @@ -235,6 +220,14 @@ "description": "Your backup has a different IEEE address than your radio. 
For your network to function properly, the IEEE address of your radio should also be changed.\n\nThis is a permanent operation.", "title": "Overwrite Radio IEEE Address" }, + "prompt_migrate_or_reconfigure": { + "description": "Are you migrating to a new radio or re-configuring the current radio?", + "menu_options": { + "intent_migrate": "Migrate to a new radio", + "intent_reconfigure": "Re-configure the current radio" + }, + "title": "Migrate or re-configure" + }, "upload_manual_backup": { "data": { "uploaded_backup_file": "Upload a file" diff --git a/tests/components/zha/test_config_flow.py b/tests/components/zha/test_config_flow.py index 5fc4b232634..725f9cc0917 100644 --- a/tests/components/zha/test_config_flow.py +++ b/tests/components/zha/test_config_flow.py @@ -46,6 +46,13 @@ def disable_platform_only(): yield +@pytest.fixture(autouse=True) +def reduce_reconnect_timeout(): + """Reduces reconnect timeout to speed up tests.""" + with patch("homeassistant.components.zha.config_flow.CONNECT_DELAY_S", 0.01): + yield + + @pytest.fixture(autouse=True) def mock_app(): """Mock zigpy app interface.""" @@ -230,10 +237,10 @@ async def test_efr32_via_zeroconf(hass): await hass.async_block_till_done() assert result3["type"] == FlowResultType.CREATE_ENTRY - assert result3["title"] == "socket://192.168.1.200:6638" + assert result3["title"] == "socket://192.168.1.200:1234" assert result3["data"] == { CONF_DEVICE: { - CONF_DEVICE_PATH: "socket://192.168.1.200:6638", + CONF_DEVICE_PATH: "socket://192.168.1.200:1234", CONF_BAUDRATE: 115200, CONF_FLOWCONTROL: "software", }, @@ -1476,21 +1483,28 @@ async def test_options_flow_defaults(async_setup_entry, async_unload_effect, has # Unload it ourselves entry.state = config_entries.ConfigEntryState.NOT_LOADED + # Reconfigure ZHA + assert result1["step_id"] == "prompt_migrate_or_reconfigure" + result2 = await hass.config_entries.options.async_configure( + flow["flow_id"], + user_input={"next_step_id": config_flow.OPTIONS_INTENT_RECONFIGURE}, + ) + # Current path is the default - assert result1["step_id"] == "choose_serial_port" - assert "/dev/ttyUSB0" in result1["data_schema"]({})[CONF_DEVICE_PATH] + assert result2["step_id"] == "choose_serial_port" + assert "/dev/ttyUSB0" in result2["data_schema"]({})[CONF_DEVICE_PATH] # Autoprobing fails, we have to manually choose the radio type - result2 = await hass.config_entries.options.async_configure( + result3 = await hass.config_entries.options.async_configure( flow["flow_id"], user_input={} ) # Current radio type is the default - assert result2["step_id"] == "manual_pick_radio_type" - assert result2["data_schema"]({})[CONF_RADIO_TYPE] == RadioType.znp.description + assert result3["step_id"] == "manual_pick_radio_type" + assert result3["data_schema"]({})[CONF_RADIO_TYPE] == RadioType.znp.description # Continue on to port settings - result3 = await hass.config_entries.options.async_configure( + result4 = await hass.config_entries.options.async_configure( flow["flow_id"], user_input={ CONF_RADIO_TYPE: RadioType.znp.description, @@ -1498,12 +1512,12 @@ async def test_options_flow_defaults(async_setup_entry, async_unload_effect, has ) # The defaults match our current settings - assert result3["step_id"] == "manual_port_config" - assert result3["data_schema"]({}) == entry.data[CONF_DEVICE] + assert result4["step_id"] == "manual_port_config" + assert result4["data_schema"]({}) == entry.data[CONF_DEVICE] with patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)): # Change the serial port path - result4 = 
await hass.config_entries.options.async_configure( + result5 = await hass.config_entries.options.async_configure( flow["flow_id"], user_input={ # Change everything @@ -1514,18 +1528,18 @@ async def test_options_flow_defaults(async_setup_entry, async_unload_effect, has ) # The radio has been detected, we can move on to creating the config entry - assert result4["step_id"] == "choose_formation_strategy" + assert result5["step_id"] == "choose_formation_strategy" async_setup_entry.assert_not_called() - result5 = await hass.config_entries.options.async_configure( + result6 = await hass.config_entries.options.async_configure( result1["flow_id"], user_input={"next_step_id": config_flow.FORMATION_REUSE_SETTINGS}, ) await hass.async_block_till_done() - assert result5["type"] == FlowResultType.CREATE_ENTRY - assert result5["data"] == {} + assert result6["type"] == FlowResultType.CREATE_ENTRY + assert result6["data"] == {} # The updated entry contains correct settings assert entry.data == { @@ -1581,33 +1595,39 @@ async def test_options_flow_defaults_socket(hass): flow["flow_id"], user_input={} ) - # Radio path must be manually entered - assert result1["step_id"] == "choose_serial_port" - assert result1["data_schema"]({})[CONF_DEVICE_PATH] == config_flow.CONF_MANUAL_PATH - + assert result1["step_id"] == "prompt_migrate_or_reconfigure" result2 = await hass.config_entries.options.async_configure( - flow["flow_id"], user_input={} + flow["flow_id"], + user_input={"next_step_id": config_flow.OPTIONS_INTENT_RECONFIGURE}, ) - # Current radio type is the default - assert result2["step_id"] == "manual_pick_radio_type" - assert result2["data_schema"]({})[CONF_RADIO_TYPE] == RadioType.znp.description + # Radio path must be manually entered + assert result2["step_id"] == "choose_serial_port" + assert result2["data_schema"]({})[CONF_DEVICE_PATH] == config_flow.CONF_MANUAL_PATH - # Continue on to port settings result3 = await hass.config_entries.options.async_configure( flow["flow_id"], user_input={} ) + # Current radio type is the default + assert result3["step_id"] == "manual_pick_radio_type" + assert result3["data_schema"]({})[CONF_RADIO_TYPE] == RadioType.znp.description + + # Continue on to port settings + result4 = await hass.config_entries.options.async_configure( + flow["flow_id"], user_input={} + ) + # The defaults match our current settings - assert result3["step_id"] == "manual_port_config" - assert result3["data_schema"]({}) == entry.data[CONF_DEVICE] + assert result4["step_id"] == "manual_port_config" + assert result4["data_schema"]({}) == entry.data[CONF_DEVICE] with patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)): - result4 = await hass.config_entries.options.async_configure( + result5 = await hass.config_entries.options.async_configure( flow["flow_id"], user_input={} ) - assert result4["step_id"] == "choose_formation_strategy" + assert result5["step_id"] == "choose_formation_strategy" @patch("homeassistant.components.zha.async_setup_entry", return_value=True) @@ -1643,14 +1663,82 @@ async def test_options_flow_restarts_running_zha_if_cancelled(async_setup_entry, entry.state = config_entries.ConfigEntryState.NOT_LOADED + assert result1["step_id"] == "prompt_migrate_or_reconfigure" + result2 = await hass.config_entries.options.async_configure( + flow["flow_id"], + user_input={"next_step_id": config_flow.OPTIONS_INTENT_RECONFIGURE}, + ) + # Radio path must be manually entered - assert result1["step_id"] == "choose_serial_port" + assert result2["step_id"] == "choose_serial_port" 
async_setup_entry.reset_mock() # Abort the flow - hass.config_entries.options.async_abort(result1["flow_id"]) + hass.config_entries.options.async_abort(result2["flow_id"]) await hass.async_block_till_done() # ZHA was set up once more async_setup_entry.assert_called_once_with(hass, entry) + + +@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) +async def test_options_flow_migration_reset_old_adapter(hass, mock_app): + """Test options flow for migrating from an old radio.""" + + entry = MockConfigEntry( + version=config_flow.ZhaConfigFlowHandler.VERSION, + domain=DOMAIN, + data={ + CONF_DEVICE: { + CONF_DEVICE_PATH: "/dev/serial/by-id/old_radio", + CONF_BAUDRATE: 12345, + CONF_FLOWCONTROL: None, + }, + CONF_RADIO_TYPE: "znp", + }, + ) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + flow = await hass.config_entries.options.async_init(entry.entry_id) + + # ZHA gets unloaded + with patch( + "homeassistant.config_entries.ConfigEntries.async_unload", return_value=True + ): + result1 = await hass.config_entries.options.async_configure( + flow["flow_id"], user_input={} + ) + + entry.state = config_entries.ConfigEntryState.NOT_LOADED + + assert result1["step_id"] == "prompt_migrate_or_reconfigure" + result2 = await hass.config_entries.options.async_configure( + flow["flow_id"], + user_input={"next_step_id": config_flow.OPTIONS_INTENT_MIGRATE}, + ) + + # User must explicitly approve radio reset + assert result2["step_id"] == "intent_migrate" + + mock_app.reset_network_info = AsyncMock() + + result3 = await hass.config_entries.options.async_configure( + flow["flow_id"], + user_input={}, + ) + + mock_app.reset_network_info.assert_awaited_once() + + # Now we can unplug the old radio + assert result3["step_id"] == "instruct_unplug" + + # And move on to choosing the new radio + result4 = await hass.config_entries.options.async_configure( + flow["flow_id"], + user_input={}, + ) + assert result4["step_id"] == "choose_serial_port" From 3bd4d66b2ebce1a70a613728de0c46b18f850102 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 5 Oct 2022 16:32:29 -1000 Subject: [PATCH 3/7] Fix bluetooth diagnostics on macos (#79680) * Fix bluetooth diagnostics on macos The pyobjc objects cannot be pickled which cases dataclasses asdict to raise an exception when trying to do the deepcopy We now implement our own as_dict to avoid this problem * add cover --- homeassistant/components/bluetooth/manager.py | 5 +-- homeassistant/components/bluetooth/models.py | 19 +++++++++ .../components/bluetooth/test_diagnostics.py | 40 +++++++++++++++++-- 3 files changed, 58 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/bluetooth/manager.py b/homeassistant/components/bluetooth/manager.py index 37c24423231..f0152f5ae5e 100644 --- a/homeassistant/components/bluetooth/manager.py +++ b/homeassistant/components/bluetooth/manager.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio from collections.abc import Callable, Iterable -from dataclasses import asdict from datetime import datetime, timedelta import itertools import logging @@ -185,11 +184,11 @@ class BluetoothManager: "adapters": self._adapters, "scanners": scanner_diagnostics, "connectable_history": [ - asdict(service_info) + service_info.as_dict() for service_info in self._connectable_history.values() ], "history": [ - asdict(service_info) for service_info in self._history.values() + service_info.as_dict() for service_info in self._history.values() ], } diff --git a/homeassistant/components/bluetooth/models.py b/homeassistant/components/bluetooth/models.py index d93f8efc1e2..9e93ea4d142 100644 --- a/homeassistant/components/bluetooth/models.py +++ b/homeassistant/components/bluetooth/models.py @@ -53,6 +53,25 @@ class BluetoothServiceInfoBleak(BluetoothServiceInfo): connectable: bool time: float + def as_dict(self) -> dict[str, Any]: + """Return as dict. + + The dataclass asdict method is not used because + it will try to deepcopy pyobjc data which will fail. + """ + return { + "name": self.name, + "address": self.address, + "rssi": self.rssi, + "manufacturer_data": self.manufacturer_data, + "service_data": self.service_data, + "service_uuids": self.service_uuids, + "source": self.source, + "advertisement": self.advertisement, + "connectable": self.connectable, + "time": self.time, + } + class BluetoothScanningMode(Enum): """The mode of scanning for bluetooth devices.""" diff --git a/tests/components/bluetooth/test_diagnostics.py b/tests/components/bluetooth/test_diagnostics.py index d641cae9c7c..1da071a76ab 100644 --- a/tests/components/bluetooth/test_diagnostics.py +++ b/tests/components/bluetooth/test_diagnostics.py @@ -3,11 +3,13 @@ from unittest.mock import ANY, patch -from bleak.backends.scanner import BLEDevice +from bleak.backends.scanner import AdvertisementData, BLEDevice from homeassistant.components import bluetooth from homeassistant.components.bluetooth.const import DEFAULT_ADDRESS +from . import inject_advertisement + from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry @@ -158,6 +160,10 @@ async def test_diagnostics_macos( # because we cannot import the scanner class directly without it throwing an # error if the test is not running on linux since we won't have the correct # deps installed when testing on MacOS. 
+    switchbot_device = BLEDevice("44:44:33:11:23:45", "wohand")
+    switchbot_adv = AdvertisementData(
+        local_name="wohand", service_uuids=[], manufacturer_data={1: b"\x01"}
+    )
     with patch(
         "homeassistant.components.bluetooth.scanner.HaScanner.discovered_devices",
         [MagicMock(address="44:44:33:11:23:45")],
@@ -180,6 +186,8 @@
     assert await hass.config_entries.async_setup(entry1.entry_id)
     await hass.async_block_till_done()
 
+    inject_advertisement(hass, switchbot_device, switchbot_adv)
+
     diag = await get_diagnostics_for_config_entry(hass, hass_client, entry1)
     assert diag == {
         "adapters": {
@@ -197,8 +205,34 @@
                 "sw_version": ANY,
             }
         },
-        "connectable_history": [],
-        "history": [],
+        "connectable_history": [
+            {
+                "address": "44:44:33:11:23:45",
+                "advertisement": ANY,
+                "connectable": True,
+                "manufacturer_data": ANY,
+                "name": "wohand",
+                "rssi": 0,
+                "service_data": {},
+                "service_uuids": [],
+                "source": "local",
+                "time": ANY,
+            }
+        ],
+        "history": [
+            {
+                "address": "44:44:33:11:23:45",
+                "advertisement": ANY,
+                "connectable": True,
+                "manufacturer_data": ANY,
+                "name": "wohand",
+                "rssi": 0,
+                "service_data": {},
+                "service_uuids": [],
+                "source": "local",
+                "time": ANY,
+            }
+        ],
         "scanners": [
             {
                 "adapter": "Core Bluetooth",

From 998d13499ce72b6a91966260042d5aa430199217 Mon Sep 17 00:00:00 2001
From: Matthew Simpson
Date: Thu, 6 Oct 2022 16:01:27 +0100
Subject: [PATCH 4/7] Bump btsmarthub_devicelist to 0.2.3 (#79705)

* Bump btsmarthub_devicelist

This PR bumps the btsmarthub_devicelist version to correct an issue caused by
a recent firmware upgrade to the SmartHub2.

* Bump btsmarthub_devicelist to 0.2.3

This version bump fixes an issue where BT SmartHub2 devices cannot be correctly
autodetected. The current workaround is to specify it manually, which isn't
great UX (and did previously work until a recent firmware upgrade).

I've also taken the opportunity to reassign ownership of the component to
myself as @jxwolstenholme no longer has a SmartHub so cannot do manual testing
and also has no need to use the component anymore.
--- CODEOWNERS | 2 +- homeassistant/components/bt_smarthub/manifest.json | 4 ++-- requirements_all.txt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 5c39337af74..a01d358208b 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -162,7 +162,7 @@ build.json @home-assistant/supervisor /tests/components/brunt/ @eavanvalkenburg /homeassistant/components/bsblan/ @liudger /tests/components/bsblan/ @liudger -/homeassistant/components/bt_smarthub/ @jxwolstenholme +/homeassistant/components/bt_smarthub/ @typhoon2099 /homeassistant/components/bthome/ @Ernst79 /tests/components/bthome/ @Ernst79 /homeassistant/components/buienradar/ @mjj4791 @ties @Robbie1221 diff --git a/homeassistant/components/bt_smarthub/manifest.json b/homeassistant/components/bt_smarthub/manifest.json index fb34117eb6b..4519ee517c3 100644 --- a/homeassistant/components/bt_smarthub/manifest.json +++ b/homeassistant/components/bt_smarthub/manifest.json @@ -2,8 +2,8 @@ "domain": "bt_smarthub", "name": "BT Smart Hub", "documentation": "https://www.home-assistant.io/integrations/bt_smarthub", - "requirements": ["btsmarthub_devicelist==0.2.2"], - "codeowners": ["@jxwolstenholme"], + "requirements": ["btsmarthub_devicelist==0.2.3"], + "codeowners": ["@typhoon2099"], "iot_class": "local_polling", "loggers": ["btsmarthub_devicelist"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1359a9c1b45..f2060f0c908 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -475,7 +475,7 @@ bthome-ble==1.2.2 bthomehub5-devicelist==0.1.1 # homeassistant.components.bt_smarthub -btsmarthub_devicelist==0.2.2 +btsmarthub_devicelist==0.2.3 # homeassistant.components.buienradar buienradar==1.0.5 From 35a69cb2532d4494f78f17024168d8afaf69a0a1 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 6 Oct 2022 20:01:54 +0200 Subject: [PATCH 5/7] Correct how unit used for statistics is determined (#79725) --- homeassistant/components/sensor/recorder.py | 35 ++++++++++++--------- tests/components/sensor/test_recorder.py | 24 +++++++------- 2 files changed, 33 insertions(+), 26 deletions(-) diff --git a/homeassistant/components/sensor/recorder.py b/homeassistant/components/sensor/recorder.py index 1a72444c758..beae06f78ff 100644 --- a/homeassistant/components/sensor/recorder.py +++ b/homeassistant/components/sensor/recorder.py @@ -149,13 +149,20 @@ def _normalize_states( state_unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT) - if state_unit not in statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER or ( - old_metadata - and old_metadata["unit_of_measurement"] - not in statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER + statistics_unit: str | None + if not old_metadata: + # We've not seen this sensor before, the first valid state determines the unit + # used for statistics + statistics_unit = state_unit + else: + # We have seen this sensor before, use the unit from metadata + statistics_unit = old_metadata["unit_of_measurement"] + + if ( + not statistics_unit + or statistics_unit not in statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER ): - # We're either not normalizing this device class or this entity is not stored - # in a unit which can be converted, return the states as they are + # The unit used by this sensor doesn't support unit conversion all_units = _get_units(fstates) if len(all_units) > 1: @@ -182,13 +189,9 @@ def _normalize_states( state_unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT) return state_unit, state_unit, fstates - converter = 
statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER[state_unit] + converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER[statistics_unit] valid_fstates: list[tuple[float, State]] = [] - statistics_unit: str | None = None - if old_metadata: - statistics_unit = old_metadata["unit_of_measurement"] - for fstate, state in fstates: state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) # Exclude states with unsupported unit from statistics @@ -198,14 +201,18 @@ def _normalize_states( if entity_id not in hass.data[WARN_UNSUPPORTED_UNIT]: hass.data[WARN_UNSUPPORTED_UNIT].add(entity_id) _LOGGER.warning( - "%s has unit %s which can't be converted to %s", + "The unit of %s (%s) can not be converted to the unit of previously " + "compiled statistics (%s). Generation of long term statistics " + "will be suppressed unless the unit changes back to %s or a " + "compatible unit. " + "Go to %s to fix this", entity_id, state_unit, statistics_unit, + statistics_unit, + LINK_DEV_STATISTICS, ) continue - if statistics_unit is None: - statistics_unit = state_unit valid_fstates.append( ( diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 8d9e34d005f..0a72dcf6fcd 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -1900,12 +1900,13 @@ def test_list_statistic_ids_unsupported(hass_recorder, caplog, _attributes): @pytest.mark.parametrize( - "device_class, state_unit, display_unit, statistics_unit, unit_class, mean, min, max", + "device_class, state_unit, state_unit2, unit_class, mean, min, max", [ - (None, None, None, None, None, 13.050847, -10, 30), - (None, "%", "%", "%", None, 13.050847, -10, 30), - ("battery", "%", "%", "%", None, 13.050847, -10, 30), - ("battery", None, None, None, None, 13.050847, -10, 30), + (None, None, "cats", None, 13.050847, -10, 30), + (None, "%", "cats", None, 13.050847, -10, 30), + ("battery", "%", "cats", None, 13.050847, -10, 30), + ("battery", None, "cats", None, 13.050847, -10, 30), + (None, "kW", "Wh", "power", 13.050847, -10, 30), ], ) def test_compile_hourly_statistics_changing_units_1( @@ -1913,8 +1914,7 @@ def test_compile_hourly_statistics_changing_units_1( caplog, device_class, state_unit, - display_unit, - statistics_unit, + state_unit2, unit_class, mean, min, @@ -1931,7 +1931,7 @@ def test_compile_hourly_statistics_changing_units_1( "unit_of_measurement": state_unit, } four, states = record_states(hass, zero, "sensor.test1", attributes) - attributes["unit_of_measurement"] = "cats" + attributes["unit_of_measurement"] = state_unit2 four, _states = record_states( hass, zero + timedelta(minutes=5), "sensor.test1", attributes ) @@ -1954,7 +1954,7 @@ def test_compile_hourly_statistics_changing_units_1( "has_sum": False, "name": None, "source": "recorder", - "statistics_unit_of_measurement": statistics_unit, + "statistics_unit_of_measurement": state_unit, "unit_class": unit_class, }, ] @@ -1978,8 +1978,8 @@ def test_compile_hourly_statistics_changing_units_1( do_adhoc_statistics(hass, start=zero + timedelta(minutes=10)) wait_recording_done(hass) assert ( - "The unit of sensor.test1 (cats) can not be converted to the unit of " - f"previously compiled statistics ({display_unit})" in caplog.text + f"The unit of sensor.test1 ({state_unit2}) can not be converted to the unit of " + f"previously compiled statistics ({state_unit})" in caplog.text ) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ @@ -1989,7 +1989,7 @@ def test_compile_hourly_statistics_changing_units_1( 
"has_sum": False, "name": None, "source": "recorder", - "statistics_unit_of_measurement": statistics_unit, + "statistics_unit_of_measurement": state_unit, "unit_class": unit_class, }, ] From aacae6b9bf759888c5bc1e7c06cdf569669a3c07 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Thu, 6 Oct 2022 20:01:18 +0200 Subject: [PATCH 6/7] Update frontend to 20221006.0 (#79745) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index e6d5f63272d..6f243da444a 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -2,7 +2,7 @@ "domain": "frontend", "name": "Home Assistant Frontend", "documentation": "https://www.home-assistant.io/integrations/frontend", - "requirements": ["home-assistant-frontend==20221005.0"], + "requirements": ["home-assistant-frontend==20221006.0"], "dependencies": [ "api", "auth", diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 2f637ba61f1..f493034171f 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -21,7 +21,7 @@ dbus-fast==1.24.0 fnvhash==0.1.0 hass-nabucasa==0.56.0 home-assistant-bluetooth==1.3.0 -home-assistant-frontend==20221005.0 +home-assistant-frontend==20221006.0 httpx==0.23.0 ifaddr==0.1.7 jinja2==3.1.2 diff --git a/requirements_all.txt b/requirements_all.txt index f2060f0c908..caf9c2209bb 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -865,7 +865,7 @@ hole==0.7.0 holidays==0.16 # homeassistant.components.frontend -home-assistant-frontend==20221005.0 +home-assistant-frontend==20221006.0 # homeassistant.components.home_connect homeconnect==0.7.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ba5ca682243..33927e272e1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -645,7 +645,7 @@ hole==0.7.0 holidays==0.16 # homeassistant.components.frontend -home-assistant-frontend==20221005.0 +home-assistant-frontend==20221006.0 # homeassistant.components.home_connect homeconnect==0.7.2 From 570270b9ea71fcd576129be74add4e1d9212e73c Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Thu, 6 Oct 2022 14:13:51 -0400 Subject: [PATCH 7/7] Bumped version to 2022.10.1 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 139b3a157b2..cb7eb689b2d 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -8,7 +8,7 @@ from .backports.enum import StrEnum APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2022 MINOR_VERSION: Final = 10 -PATCH_VERSION: Final = "0" +PATCH_VERSION: Final = "1" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0) diff --git a/pyproject.toml b/pyproject.toml index 8152dd57cfc..287e1c6d627 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2022.10.0" +version = "2022.10.1" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst"